From 01a80f2d2f18071f429b25f4faa52aefe026a1f5 Mon Sep 17 00:00:00 2001 From: Azaya <99359668+Azaya89@users.noreply.github.com> Date: Thu, 20 Feb 2025 11:22:37 +0100 Subject: [PATCH] docs: Update docstrings to numpydoc style (#6506) --- doc/generate_modules.py | 1 - holoviews/__init__.py | 5 +- holoviews/__version.py | 1 + holoviews/annotators.py | 55 ++- holoviews/core/accessors.py | 203 ++++---- holoviews/core/boundingregion.py | 88 ++-- holoviews/core/data/__init__.py | 481 +++++++++++-------- holoviews/core/data/array.py | 4 +- holoviews/core/data/cudf.py | 8 +- holoviews/core/data/dask.py | 16 +- holoviews/core/data/dictionary.py | 8 +- holoviews/core/data/grid.py | 17 +- holoviews/core/data/ibis.py | 4 +- holoviews/core/data/image.py | 24 +- holoviews/core/data/interface.py | 57 +-- holoviews/core/data/multipath.py | 51 +- holoviews/core/data/pandas.py | 12 +- holoviews/core/data/spatialpandas.py | 171 ++++--- holoviews/core/data/util.py | 4 +- holoviews/core/data/xarray.py | 3 +- holoviews/core/decollate.py | 55 +-- holoviews/core/dimension.py | 610 ++++++++++++++---------- holoviews/core/element.py | 206 ++++---- holoviews/core/io.py | 153 +++--- holoviews/core/layout.py | 254 ++++++---- holoviews/core/ndmapping.py | 338 ++++++++------ holoviews/core/operation.py | 36 +- holoviews/core/options.py | 300 ++++++------ holoviews/core/overlay.py | 119 +++-- holoviews/core/pprint.py | 49 +- holoviews/core/sheetcoords.py | 97 ++-- holoviews/core/spaces.py | 624 +++++++++++++++---------- holoviews/core/traversal.py | 16 +- holoviews/core/tree.py | 103 ++-- holoviews/core/util.py | 488 ++++++++++--------- holoviews/element/__init__.py | 4 +- holoviews/element/annotation.py | 206 ++++---- holoviews/element/chart.py | 67 +-- holoviews/element/chart3d.py | 16 +- holoviews/element/comparison.py | 24 +- holoviews/element/geom.py | 21 +- holoviews/element/graphs.py | 95 ++-- holoviews/element/path.py | 79 ++-- holoviews/element/raster.py | 121 ++--- holoviews/element/sankey.py | 22 +- holoviews/element/selection.py | 16 +- holoviews/element/stats.py | 116 +++-- holoviews/element/tabular.py | 20 +- holoviews/element/tiles.py | 12 +- holoviews/element/util.py | 48 +- holoviews/ipython/__init__.py | 36 +- holoviews/ipython/archive.py | 37 +- holoviews/ipython/display_hooks.py | 44 +- holoviews/ipython/magics.py | 54 ++- holoviews/ipython/preprocessors.py | 45 +- holoviews/ipython/widgets.py | 23 +- holoviews/operation/datashader.py | 149 +++--- holoviews/operation/downsample.py | 61 ++- holoviews/operation/element.py | 81 ++-- holoviews/operation/normalization.py | 20 +- holoviews/operation/resample.py | 12 +- holoviews/operation/stats.py | 12 +- holoviews/operation/timeseries.py | 16 +- holoviews/plotting/__init__.py | 4 +- holoviews/plotting/bokeh/annotation.py | 28 +- holoviews/plotting/bokeh/callbacks.py | 154 +++--- holoviews/plotting/bokeh/chart.py | 24 +- holoviews/plotting/bokeh/element.py | 144 +++--- holoviews/plotting/bokeh/geometry.py | 4 +- holoviews/plotting/bokeh/graphs.py | 20 +- holoviews/plotting/bokeh/heatmap.py | 46 +- holoviews/plotting/bokeh/hex_tiles.py | 4 +- holoviews/plotting/bokeh/links.py | 40 +- holoviews/plotting/bokeh/path.py | 12 +- holoviews/plotting/bokeh/plot.py | 76 +-- holoviews/plotting/bokeh/raster.py | 8 +- holoviews/plotting/bokeh/renderer.py | 20 +- holoviews/plotting/bokeh/sankey.py | 16 +- holoviews/plotting/bokeh/selection.py | 4 +- holoviews/plotting/bokeh/stats.py | 16 +- holoviews/plotting/bokeh/styles.py | 31 +- holoviews/plotting/bokeh/tabular.py | 8 
+- holoviews/plotting/bokeh/tiles.py | 4 +- holoviews/plotting/bokeh/util.py | 192 ++++---- holoviews/plotting/links.py | 38 +- holoviews/plotting/mixins.py | 16 +- holoviews/plotting/mpl/__init__.py | 4 +- holoviews/plotting/mpl/annotation.py | 57 ++- holoviews/plotting/mpl/chart.py | 72 +-- holoviews/plotting/mpl/chart3d.py | 24 +- holoviews/plotting/mpl/element.py | 68 +-- holoviews/plotting/mpl/geometry.py | 9 +- holoviews/plotting/mpl/heatmap.py | 1 + holoviews/plotting/mpl/path.py | 4 +- holoviews/plotting/mpl/plot.py | 64 +-- holoviews/plotting/mpl/raster.py | 4 +- holoviews/plotting/mpl/renderer.py | 29 +- holoviews/plotting/mpl/sankey.py | 4 +- holoviews/plotting/mpl/stats.py | 16 +- holoviews/plotting/mpl/tabular.py | 4 +- holoviews/plotting/mpl/util.py | 72 ++- holoviews/plotting/plot.py | 193 ++++---- holoviews/plotting/plotly/callbacks.py | 4 +- holoviews/plotting/plotly/dash.py | 236 ++++++---- holoviews/plotting/plotly/element.py | 30 +- holoviews/plotting/plotly/plot.py | 20 +- holoviews/plotting/plotly/renderer.py | 13 +- holoviews/plotting/plotly/selection.py | 4 +- holoviews/plotting/plotly/tiles.py | 4 +- holoviews/plotting/plotly/util.py | 154 +++--- holoviews/plotting/renderer.py | 84 ++-- holoviews/plotting/util.py | 209 +++++---- holoviews/pyodide.py | 4 +- holoviews/selection.py | 83 ++-- holoviews/streams.py | 359 +++++++------- holoviews/util/__init__.py | 161 ++++--- holoviews/util/command.py | 4 +- holoviews/util/locator.py | 21 +- holoviews/util/parser.py | 52 ++- holoviews/util/settings.py | 40 +- holoviews/util/transform.py | 231 +++++---- holoviews/util/warnings.py | 7 +- pyproject.toml | 11 + 123 files changed, 5243 insertions(+), 4140 deletions(-) diff --git a/doc/generate_modules.py b/doc/generate_modules.py index 0b0a3ae739..79092adcab 100644 --- a/doc/generate_modules.py +++ b/doc/generate_modules.py @@ -85,7 +85,6 @@ def format_inheritance_diagram(module, package=None): def create_module_file(package, module, opts): """Build the text of the file and write the file.""" - text = format_heading(1, f'{module} Module') text += format_inheritance_diagram(package, module) text += format_heading(2, f':mod:`{module}` Module') diff --git a/holoviews/__init__.py b/holoviews/__init__.py index 6182110f4b..377be990ce 100644 --- a/holoviews/__init__.py +++ b/holoviews/__init__.py @@ -72,6 +72,7 @@ To ask the community go to https://discourse.holoviz.org/. To report issues go to https://github.com/holoviz/holoviews. + """ import builtins import os @@ -158,8 +159,7 @@ def __call__(self, *args, **kwargs): def help(obj, visualization=True, ansi=True, backend=None, recursive=False, pattern=None): - """ - Extended version of the built-in help that supports parameterized + """Extended version of the built-in help that supports parameterized functions and objects. A pattern (regular expression) may be used to filter the output and if recursive is set to True, documentation for the supplied object is shown. Note that the recursive option will @@ -167,6 +167,7 @@ def help(obj, visualization=True, ansi=True, backend=None, If ansi is set to False, all ANSI color codes are stripped out. + """ backend = backend if backend else Store.current_backend info = Store.info(obj, ansi=ansi, backend=backend, visualization=visualization, diff --git a/holoviews/__version.py b/holoviews/__version.py index f01259f510..6edbf1070a 100644 --- a/holoviews/__version.py +++ b/holoviews/__version.py @@ -1,6 +1,7 @@ """Define the package version. 
Called __version.py as setuptools_scm will create a _version.py + """ import os.path diff --git a/holoviews/annotators.py b/holoviews/annotators.py index d74bd86b2c..741e901dd9 100644 --- a/holoviews/annotators.py +++ b/holoviews/annotators.py @@ -19,13 +19,13 @@ def preprocess(function, current=None): - """ - Turns a param.depends watch call into a preprocessor method, i.e. + """Turns a param.depends watch call into a preprocessor method, i.e. skips all downstream events triggered by it. - NOTE: This is a temporary hack while the addition of preprocessors + NOTE : This is a temporary hack while the addition of preprocessors in param is under discussion. This only works for the first method which depends on a particular parameter. (see https://github.com/pyviz/param/issues/332) + """ if current is None: current = [] @@ -40,12 +40,12 @@ def inner(*args, **kwargs): class annotate(param.ParameterizedFunction): - """ - The annotate function allows drawing, editing and annotating any + """The annotate function allows drawing, editing and annotating any given Element (if it is supported). The annotate function returns a Layout of the editable plot and an Overlay of table(s), which allow editing the data of the element. The edited and annotated data may be accessed using the element and selected properties. + """ annotator = param.Parameter(doc="""The current Annotator instance.""") @@ -101,11 +101,14 @@ def compose(cls, *annotators): The composed Layout will contain all the elements in the supplied annotators and an overlay of all editor tables. - Args: - annotators: Annotator layouts or elements to compose + Parameters + ---------- + annotators + Annotator layouts or elements to compose - Returns: - A new layout consisting of the overlaid plots and tables + Returns + ------- + A new layout consisting of the overlaid plots and tables """ layers = [] tables = [] @@ -160,12 +163,12 @@ def __call__(self, element, **params): class Annotator(PaneBase): - """ - An Annotator allows drawing, editing and annotating a specific + """An Annotator allows drawing, editing and annotating a specific type of element. Each Annotator consists of the `plot` to draw and edit the element and the `editor`, which contains a list of tables, which make it possible to annotate each object in the element with additional properties defined in the `annotations`. + """ annotations = param.ClassSelector(default=[], class_=(dict, list), doc=""" @@ -285,11 +288,13 @@ def compose(cls, *annotators): The composed Panel will contain all the elements in the supplied Annotators and Tabs containing all editors. - Args: - annotators: Annotator objects or elements to compose + Parameters + ---------- + annotators : Annotator objects or elements to compose - Returns: - A new Panel consisting of the overlaid plots and tables + Returns + ------- + A new Panel consisting of the overlaid plots and tables """ layers, tables = [], [] for a in annotators: @@ -311,9 +316,9 @@ def selected(self): class PathAnnotator(Annotator): - """ - Annotator which allows drawing and editing Paths and associating + """Annotator which allows drawing and editing Paths and associating values with each path and each vertex of a path using a table. 
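# A minimal, hedged sketch of the annotate API documented in the
# annotators.py hunks above. The Points data and the 'Label' annotation
# name are illustrative assumptions, not part of this patch.
import holoviews as hv
hv.extension('bokeh')

points = hv.Points([(0.0, 0.0), (1.0, 1.0)])
annotator = hv.annotate.instance()
layout = annotator(points, annotations=['Label'])  # editable plot + table
# The edited data can be read back via the properties described above
# (e.g. the selected property), and several annotated layouts can be
# combined with hv.annotate.compose(...).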
+ """ edit_vertices = param.Boolean(default=True, doc=""" @@ -437,9 +442,9 @@ def selected(self): class PolyAnnotator(PathAnnotator): - """ - Annotator which allows drawing and editing Polygons and associating + """Annotator which allows drawing and editing Polygons and associating values with each polygon and each vertex of a Polygon using a table. + """ object = param.ClassSelector(class_=Polygons, doc=""" @@ -485,9 +490,9 @@ def _process_element(self, object): class PointAnnotator(_GeomAnnotator): - """ - Annotator which allows drawing and editing Points and associating + """Annotator which allows drawing and editing Points and associating values with each point using a table. + """ default_opts = param.Dict(default={'responsive': True, 'min_height': 400, @@ -502,9 +507,9 @@ class PointAnnotator(_GeomAnnotator): class CurveAnnotator(_GeomAnnotator): - """ - Annotator which allows editing a Curve element and associating values + """Annotator which allows editing a Curve element and associating values with each vertex using a Table. + """ default_opts = param.Dict(default={'responsive': True, 'min_height': 400, @@ -528,9 +533,9 @@ def _init_stream(self): class RectangleAnnotator(_GeomAnnotator): - """ - Annotator which allows drawing and editing Rectangles and associating + """Annotator which allows drawing and editing Rectangles and associating values with each point using a table. + """ object = param.ClassSelector(class_=Rectangles, doc=""" diff --git a/holoviews/core/accessors.py b/holoviews/core/accessors.py index 278b41611b..042631875b 100644 --- a/holoviews/core/accessors.py +++ b/holoviews/core/accessors.py @@ -1,5 +1,5 @@ -""" -Module for accessor objects for viewable HoloViews objects. +"""Module for accessor objects for viewable HoloViews objects. + """ import copy from functools import wraps @@ -80,9 +80,8 @@ def pipelined_call(*args, **kwargs): class Apply(metaclass=AccessorPipelineMeta): - """ - Utility to apply a function or operation to all viewable elements - inside the object. + """Utility to apply a function or operation to all viewable elements inside the object. + """ def __init__(self, obj, mode=None): @@ -97,34 +96,36 @@ def __call__(self, apply_function, streams=None, link_inputs=True, supplied the returned object will dynamically update in response to changes in those objects. - Args: - apply_function: A callable function - The function will be passed the return value of the - DynamicMap as the first argument and any supplied - stream values or keywords as additional keyword - arguments. - streams (list, optional): A list of Stream objects - The Stream objects can dynamically supply values which - will be passed to the function as keywords. - link_inputs (bool, optional): Whether to link the inputs - Determines whether Streams and Links attached to - original object will be inherited. - link_dataset (bool, optional): Whether to link the dataset - Determines whether the dataset will be inherited. - dynamic (bool, optional): Whether to make object dynamic - By default object is made dynamic if streams are - supplied, an instance parameter is supplied as a - keyword argument, or the supplied function is a - parameterized method. - per_element (bool, optional): Whether to apply per element - By default apply works on the leaf nodes, which - includes both elements and overlays. If set it will - apply directly to elements. - kwargs (dict, optional): Additional keyword arguments - Keyword arguments which will be supplied to the - function. 
- - Returns: + Parameters + ---------- + apply_function : A callable function + The function will be passed the return value of the + DynamicMap as the first argument and any supplied + stream values or keywords as additional keyword + arguments. + streams : list, optional + The Stream objects can dynamically supply values which + will be passed to the function as keywords. + link_inputs : bool, optional + Determines whether Streams and Links attached to + original object will be inherited. + link_dataset : bool, optional + Determines whether the dataset will be inherited. + dynamic : bool, optional + By default object is made dynamic if streams are + supplied, an instance parameter is supplied as a + keyword argument, or the supplied function is a + parameterized method. + per_element : bool, optional + Whether to apply per element. + By default apply works on the leaf nodes, which + includes both elements and overlays. If set it will + apply directly to elements. + **kwargs : dict, optional + Keyword arguments which will be supplied to the function. + + Returns + ------- A new object where the function was applied to all contained (Nd)Overlay or Element objects. """ @@ -214,7 +215,9 @@ def apply_function(object, **kwargs): def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): """Applies a aggregate function to all ViewableElements. - See :py:meth:`Dimensioned.aggregate` and :py:meth:`Apply.__call__` + See Also + -------- + :py:meth:`Dimensioned.aggregate` and :py:meth:`Apply.__call__` for more information. """ kwargs['_method_args'] = (dimensions, function, spreadfn) @@ -224,7 +227,9 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): def opts(self, *args, **kwargs): """Applies options to all ViewableElement objects. - See :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` + See Also + -------- + :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` for more information. """ from ..streams import Params @@ -241,7 +246,9 @@ def opts(self, *args, **kwargs): def reduce(self, dimensions=None, function=None, spreadfn=None, **kwargs): """Applies a reduce function to all ViewableElement objects. - See :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` + See Also + -------- + :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` for more information. """ if dimensions is None: @@ -253,7 +260,9 @@ def reduce(self, dimensions=None, function=None, spreadfn=None, **kwargs): def sample(self, samples=None, bounds=None, **kwargs): """Samples element values at supplied coordinates. - See :py:meth:`Dataset.sample` and :py:meth:`Apply.__call__` + See Also + -------- + :py:meth:`Dataset.sample` and :py:meth:`Apply.__call__` for more information. """ if samples is None: @@ -265,7 +274,9 @@ def sample(self, samples=None, bounds=None, **kwargs): def select(self, **kwargs): """Applies a selection to all ViewableElement objects. - See :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` + See Also + -------- + :py:meth:`Dimensioned.opts` and :py:meth:`Apply.__call__` for more information. """ return self.__call__('select', **kwargs) @@ -273,7 +284,9 @@ def select(self, **kwargs): def transform(self, *args, **kwargs): """Applies transforms to all Datasets. - See :py:meth:`Dataset.transform` and :py:meth:`Apply.__call__` + See Also + -------- + :py:meth:`Dataset.transform` and :py:meth:`Apply.__call__` for more information. 
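# A minimal sketch of the .apply accessor methods documented above; the toy
# Curve and the option value are illustrative assumptions.
import numpy as np
import holoviews as hv
hv.extension('bokeh')

curve = hv.Curve((np.arange(10), np.arange(10) ** 2))
windowed = curve.apply('select', x=(2, 8))  # apply an element method by name
styled = curve.apply.opts(color='red')      # apply options to all elements
# Passing streams=[...] to .apply() returns a DynamicMap that re-evaluates
# the function whenever the stream values change, as described above.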
""" from ..streams import Params @@ -290,9 +303,9 @@ def transform(self, *args, **kwargs): class Redim(metaclass=AccessorPipelineMeta): - """ - Utility that supports re-dimensioning any HoloViews object via the + """Utility that supports re-dimensioning any HoloViews object via the redim method. + """ def __init__(self, obj, mode=None): @@ -307,12 +320,17 @@ def __str__(self): def replace_dimensions(cls, dimensions, overrides): """Replaces dimensions in list with dictionary of overrides. - Args: - dimensions: List of dimensions - overrides: Dictionary of dimension specs indexed by name - - Returns: - list: List of dimensions with replacements applied + Parameters + ---------- + dimensions : list + List of dimensions + overrides : dict + Dictionary of dimension specs indexed by name + + Returns + ------- + list + List of dimensions with replacements applied """ from .dimension import Dimension @@ -342,9 +360,9 @@ def replace_dimensions(cls, dimensions, overrides): def _filter_cache(self, dmap, kdims): - """ - Returns a filtered version of the DynamicMap cache leaving only + """Returns a filtered version of the DynamicMap cache leaving only keys consistently with the newly specified values + """ filtered = [] for key, value in dmap.data.items(): @@ -397,11 +415,13 @@ def _transform_expression(expression): return _transform_expression def __call__(self, specs=None, **dimensions): - """ - Replace dimensions on the dataset and allows renaming - dimensions in the dataset. Dimension mapping should map - between the old dimension name and a dictionary of the new - attributes, a completely new dimension or a new string name. + """Replace dimensions on the dataset and allows renaming + dimensions in the dataset. + + Dimension mapping should map between the old dimension name + and a dictionary of the new attributes, + a completely new dimension or a new string name. + """ obj = self._obj redimmed = obj @@ -501,14 +521,19 @@ def __init__(self, obj, mode=None): def get(self, group=None, backend=None, defaults=True): """Returns the corresponding Options object. - Args: - group: The options group. Flattens across groups if None. - backend: Current backend if None otherwise chosen backend. - defaults: Whether to include default option values - - Returns: - Options object associated with the object containing the - applied option keywords. + Parameters + ---------- + group : The options group, optional + Flattens across groups if None. + backend : optional + Current backend if None otherwise chosen backend. + defaults : bool, optional + Whether to include default option values + + Returns + ------- + Options object associated with the object containing the + applied option keywords. """ from .options import Options, Store keywords = {} @@ -540,26 +565,33 @@ def __call__(self, *args, **kwargs): obj.opts({'Image': dict(cmap='viridis', show_title=False)}) - Args: - *args: Sets of options to apply to object - Supports a number of formats including lists of Options - objects, a type[.group][.label] followed by a set of - keyword options to apply and a dictionary indexed by - type[.group][.label] specs. - backend (optional): Backend to apply options to - Defaults to current selected backend - clone (bool, optional): Whether to clone object - Options can be applied in place with clone=False - **kwargs: Keywords of options - Set of options to apply to the object - + Parameters + ---------- + *args + Sets of options to apply to object. 
+ Supports a number of formats including lists of Options + objects, a type[.group][.label] followed by a set of + keyword options to apply and a dictionary indexed by + type[.group][.label] specs. + backend : optional + Backend to apply options to + Defaults to current selected backend + clone : bool, optional + Whether to clone object + Options can be applied in place with clone=False + **kwargs : Keywords of options + Set of options to apply to the object + + Notes + ----- For backwards compatibility, this method also supports the option group semantics now offered by the hv.opts.apply_groups utility. This usage will be deprecated and for more information see the apply_options_type docstring. - Returns: - Returns the object or a clone with the options applied + Returns + ------- + Returns the object or a clone with the options applied """ if not(args) and not(kwargs): return self._obj @@ -585,19 +617,24 @@ def _dispatch_opts(self, *args, **kwargs): def clear(self, clone=False): """Clears any options applied to the object. - Args: - clone: Whether to return a cleared clone or clear inplace + Parameters + ---------- + clone : bool + Whether to return a cleared clone or clear inplace - Returns: - The object cleared of any options applied to it + Returns + ------- + The object cleared of any options applied to it """ return self._obj.opts(clone=clone) def info(self, show_defaults=False): """Prints a repr of the object including any applied options. - Args: - show_defaults: Whether to include default options + Parameters + ---------- + show_defaults : bool + Whether to include default options """ pprinter = PrettyPrinter(show_options=True, show_defaults=show_defaults) print(pprinter.pprint(self._obj)) diff --git a/holoviews/core/boundingregion.py b/holoviews/core/boundingregion.py index 1394ec7825..575c189d80 100644 --- a/holoviews/core/boundingregion.py +++ b/holoviews/core/boundingregion.py @@ -1,7 +1,7 @@ -""" -Bounding regions and bounding boxes. +"""Bounding regions and bounding boxes. File originally part of the Topographica project. + """ ### JABALERT: The aarect information should probably be rewritten in ### matrix notation, not list notation, so that it can be scaled, @@ -13,11 +13,12 @@ class BoundingRegion: - """ - Abstract bounding region class, for any portion of a 2D plane. + """Abstract bounding region class, for any portion of a 2D plane. Only subclasses can be instantiated directly. + """ + __abstract = True __slots__ = ['_aarect'] @@ -50,8 +51,8 @@ def aarect(self): def centroid(self): - """ - Return the coordinates of the center of this BoundingBox + """Return the coordinates of the center of this BoundingBox + """ return self.aarect().centroid() @@ -76,19 +77,20 @@ def __setstate__(self, state): class BoundingBox(BoundingRegion): - """ - A rectangular bounding box defined either by two points forming + """A rectangular bounding box defined either by two points forming an axis-aligned rectangle (or simply a radius for a square). + """ + __slots__ = [] def __str__(self): - """ - Return BoundingBox(points=((left,bottom),(right,top))) + """Return BoundingBox(points=((left,bottom),(right,top))) Reimplemented here so that 'print' for a BoundingBox will display the bounds. + """ l, b, r, t = self._aarect.lbrt() if (not isinstance(r, datetime_types) and r == -l and @@ -113,13 +115,13 @@ def script_repr(self, imports=None, prefix=" "): def __init__(self, **args): - """ - Create a BoundingBox. + """Create a BoundingBox. 
Either 'radius' or 'points' can be specified for the AARectangle. If neither radius nor points is passed in, create a default AARectangle defined by (-0.5,-0.5),(0.5,0.5). + """ # if present, 'radius', 'min_radius', and 'points' are deleted from # args before they're passed to the superclass (because they @@ -147,30 +149,30 @@ def __contains__(self, other): def contains(self, x, y): - """ - Returns true if the given point is contained within the + """Returns true if the given point is contained within the bounding box, where all boundaries of the box are considered to be inclusive. + """ left, bottom, right, top = self.aarect().lbrt() return (left <= x <= right) and (bottom <= y <= top) def contains_exclusive(self, x, y): - """ - Return True if the given point is contained within the + """Return True if the given point is contained within the bounding box, where the bottom and right boundaries are considered exclusive. + """ left, bottom, right, top = self._aarect.lbrt() return (left <= x < right) and (bottom < y <= top) def containsbb_exclusive(self, x): - """ - Returns true if the given BoundingBox x is contained within the + """Returns true if the given BoundingBox x is contained within the bounding box, where at least one of the boundaries of the box has to be exclusive. + """ left, bottom, right, top = self.aarect().lbrt() leftx, bottomx, rightx, topx = x.aarect().lbrt() @@ -179,9 +181,9 @@ def containsbb_exclusive(self, x): def containsbb_inclusive(self, x): - """ - Returns true if the given BoundingBox x is contained within the + """Returns true if the given BoundingBox x is contained within the bounding box, including cases of exact match. + """ left, bottom, right, top = self.aarect().lbrt() leftx, bottomx, rightx, topx = x.aarect().lbrt() @@ -190,12 +192,12 @@ def containsbb_inclusive(self, x): def upperexclusive_contains(self, x, y): - """ - Returns true if the given point is contained within the + """Returns true if the given point is contained within the bounding box, where the right and upper boundaries are exclusive, and the left and lower boundaries are inclusive. Useful for tiling a plane into non-overlapping regions. + """ left, bottom, right, top = self.aarect().lbrt() return (left <= x < right) and (bottom <= y < top) @@ -206,8 +208,8 @@ def aarect(self): def lbrt(self): - """ - return left,bottom,right,top values for the BoundingBox. + """Return left,bottom,right,top values for the BoundingBox. + """ return self._aarect.lbrt() @@ -221,10 +223,11 @@ def __eq__(self, other): class BoundingEllipse(BoundingBox): - """ - Similar to BoundingBox, but the region is the ellipse + """Similar to BoundingBox, but the region is the ellipse inscribed within the rectangle. + """ + __slots__ = [] @@ -245,14 +248,17 @@ def contains(self, x, y): # and use the slot itself instead. ################################################### class AARectangle: - """ - Axis-aligned rectangle class. + """Axis-aligned rectangle class. Defines the smallest axis-aligned rectangle that encloses a set of points. - Usage: aar = AARectangle( (x1,y1),(x2,y2), ... , (xN,yN) ) + Example + ------- + >>> aar = AARectangle( (x1,y1),(x2,y2), ... , (xN,yN) ) + """ + __slots__ = ['_bottom', '_left', '_right', '_top'] @@ -278,33 +284,43 @@ def __setstate__(self, state): def top(self): - """Return the y-coordinate of the top of the rectangle.""" + """Return the y-coordinate of the top of the rectangle. 
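# A minimal sketch of the BoundingBox behaviour documented in the
# boundingregion.py hunks above; the coordinates are illustrative.
from holoviews.core.boundingregion import BoundingBox

bb = BoundingBox(points=((0, 0), (2, 1)))  # axis-aligned box from two points
bb.lbrt()            # (0, 0, 2, 1): left, bottom, right, top
bb.contains(1, 0.5)  # True - all boundaries are inclusive
unit = BoundingBox(radius=0.5)  # the default (-0.5,-0.5)..(0.5,0.5) square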
+ + """ return self._top def bottom(self): - """Return the y-coordinate of the bottom of the rectangle.""" + """Return the y-coordinate of the bottom of the rectangle. + + """ return self._bottom def left(self): - """Return the x-coordinate of the left side of the rectangle.""" + """Return the x-coordinate of the left side of the rectangle. + + """ return self._left def right(self): - """Return the x-coordinate of the right side of the rectangle.""" + """Return the x-coordinate of the right side of the rectangle. + + """ return self._right def lbrt(self): - """Return (left,bottom,right,top) as a tuple.""" + """Return (left,bottom,right,top) as a tuple. + + """ return self._left, self._bottom, self._right, self._top def centroid(self): - """ - Return the centroid of the rectangle. + """Return the centroid of the rectangle. + """ left, bottom, right, top = self.lbrt() return (right + left) / 2.0, (top + bottom) / 2.0 diff --git a/holoviews/core/data/__init__.py b/holoviews/core/data/__init__.py index 73b4d3e54d..027b401f42 100644 --- a/holoviews/core/data/__init__.py +++ b/holoviews/core/data/__init__.py @@ -53,21 +53,25 @@ def concat(datasets, datatype=None): for each dimension being concatenated along and then hierarchically concatenates along each dimension. - Args: - datasets: NdMapping of Datasets to concatenate - datatype: Datatype to cast data to before concatenation - - Returns: - Concatenated dataset + Parameters + ---------- + datasets + NdMapping of Datasets to concatenate + datatype + Datatype to cast data to before concatenation + + Returns + ------- + Concatenated dataset """ return Interface.concatenate(datasets, datatype) class DataConversion: - """ - DataConversion is a very simple container object which can be + """DataConversion is a very simple container object which can be given an existing Dataset Element and provides methods to convert the Dataset into most other Element types. + """ def __init__(self, element): @@ -75,13 +79,13 @@ def __init__(self, element): def __call__(self, new_type, kdims=None, vdims=None, groupby=None, sort=False, **kwargs): - """ - Generic conversion method for Dataset based Element + """Generic conversion method for Dataset based Element types. Supply the Dataset Element type to convert to and optionally the key dimensions (kdims), value dimensions (vdims) and the dimensions. to group over. Converted Columns can be automatically sorted via the sort option and kwargs can be passed through. + """ element_params = new_type.param.objects() kdim_param = element_params['kdims'] @@ -151,8 +155,8 @@ def __call__(self, new_type, kdims=None, vdims=None, groupby=None, @contextmanager def disable_pipeline(): - """ - Disable PipelineMeta class from storing pipelines. + """Disable PipelineMeta class from storing pipelines. + """ PipelineMeta.disable = True try: @@ -231,8 +235,7 @@ def pipelined_fn(*args, **kwargs): class Dataset(Element, metaclass=PipelineMeta): - """ - Dataset provides a general baseclass for Element types that + """Dataset provides a general baseclass for Element types that contain structured data and supports a range of data formats. The Dataset class supports various methods offering a consistent @@ -240,6 +243,7 @@ class Dataset(Element, metaclass=PipelineMeta): format used. These operations include indexing, selection and various ways of aggregating or collapsing the data with a supplied function. 
+ """ datatype = param.List(default=datatypes, doc=""" @@ -267,9 +271,9 @@ class Dataset(Element, metaclass=PipelineMeta): interface: Interface def __new__(cls, data=None, kdims=None, vdims=None, **kwargs): - """ - Allows casting a DynamicMap to an Element class like hv.Curve, by applying the + """Allows casting a DynamicMap to an Element class like hv.Curve, by applying the class to each underlying element. + """ if isinstance(data, DynamicMap): class_name = cls.__name__ @@ -364,7 +368,9 @@ def __init__(self, data, kdims=None, vdims=None, **kwargs): self._dataset._binned = self._binned def __getstate__(self): - "Ensures pipelines are dropped" + """Ensures pipelines are dropped + + """ obj_dict = super().__getstate__() if '_pipeline' in obj_dict: pipeline = obj_dict['_pipeline'] @@ -379,8 +385,8 @@ def redim(self): @property def dataset(self): - """ - The Dataset that this object was created from + """The Dataset that this object was created from + """ if self._dataset is None: if type(self) is Dataset: @@ -397,34 +403,34 @@ def dataset(self): @property def pipeline(self): - """ - Chain operation that evaluates the sequence of operations that was + """Chain operation that evaluates the sequence of operations that was used to create this object, starting with the Dataset stored in dataset property + """ return self._pipeline def compute(self): - """ - Computes the data to a data format that stores the daata in + """Computes the data to a data format that stores the daata in memory, e.g. a Dask dataframe or array is converted to a Pandas DataFrame or NumPy array. - Returns: - Dataset with the data stored in in-memory format + Returns + ------- + Dataset with the data stored in in-memory format """ return self.interface.compute(self) def persist(self): - """ - Persists the results of a lazy data interface to memory to + """Persists the results of a lazy data interface to memory to speed up data manipulation and visualization. If the particular data backend already holds the data in memory this is a no-op. Unlike the compute method this maintains the same data type. - Returns: - Dataset with the data persisted to memory + Returns + ------- + Dataset with the data persisted to memory """ persisted = self.interface.persist(self) if persisted.interface is self.interface: @@ -435,15 +441,21 @@ def persist(self): def closest(self, coords=None, **kwargs): """Snaps coordinate(s) to closest coordinate in Dataset - Args: - coords: List of coordinates expressed as tuples - **kwargs: Coordinates defined as keyword pairs - - Returns: - List of tuples of the snapped coordinates - - Raises: - NotImplementedError: Raised if snapping is not supported + Parameters + ---------- + coords + List of coordinates expressed as tuples + **kwargs + Coordinates defined as keyword pairs + + Returns + ------- + List of tuples of the snapped coordinates + + Raises + ------ + NotImplementedError + Raised if snapping is not supported """ if coords is None: coords = [] @@ -466,15 +478,18 @@ def closest(self, coords=None, **kwargs): def sort(self, by=None, reverse=False): - """ - Sorts the data by the values along the supplied dimensions. - - Args: - by: Dimension(s) to sort by - reverse (bool, optional): Reverse sort order - - Returns: - Sorted Dataset + """Sorts the data by the values along the supplied dimensions. 
+ + Parameters + ---------- + by + Dimension(s) to sort by + reverse : bool, optional + Reverse sort order + + Returns + ------- + Sorted Dataset """ if by is None: by = self.kdims @@ -487,15 +502,20 @@ def sort(self, by=None, reverse=False): def range(self, dim, data_range=True, dimension_range=True): """Return the lower and upper bounds of values along dimension. - Args: - dimension: The dimension to compute the range on. - data_range (bool): Compute range from data values - dimension_range (bool): Include Dimension ranges - Whether to include Dimension range and soft_range - in range calculation - - Returns: - Tuple containing the lower and upper bound + Parameters + ---------- + dim + The dimension to compute the range on. + data_range : bool + Compute range from data values + dimension_range : bool + Include Dimension ranges + Whether to include Dimension range and soft_range + in range calculation + + Returns + ------- + Tuple containing the lower and upper bound """ dim = self.get_dimension(dim) @@ -519,14 +539,22 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): the key dimensions and a key value scalar or array of values, matching the length or shape of the Dataset. - Args: - dimension: Dimension or dimension spec to add - dim_pos (int): Integer index to insert dimension at - dim_val (scalar or ndarray): Dimension value(s) to add - vdim: Disabled, this type does not have value dimensions - **kwargs: Keyword arguments passed to the cloned element - Returns: - Cloned object containing the new dimension + Parameters + ---------- + dimension + Dimension or dimension spec to add + dim_pos : int + Integer index to insert dimension at + dim_val : scalar or ndarray + Dimension value(s) to add + vdim + Disabled, this type does not have value dimensions + **kwargs + Keyword arguments passed to the cloned element + + Returns + ------- + Cloned object containing the new dimension """ if isinstance(dimension, (str, tuple)): dimension = Dimension(dimension) @@ -582,27 +610,29 @@ def select(self, selection_expr=None, selection_specs=None, **selection): from holoviews import dim ds.select(selection_expr=dim('x') % 2 == 0) - Args: - selection_expr: holoviews.dim predicate expression - specifying selection. - selection_specs: List of specs to match on - A list of types, functions, or type[.group][.label] - strings specifying which objects to apply the - selection on. - **selection: Dictionary declaring selections by dimension - Selections can be scalar values, tuple ranges, lists - of discrete values and boolean arrays - - Returns: - Returns an Dimensioned object containing the selected data - or a scalar if a single value was selected + Parameters + ---------- + selection_expr : holoviews.dim predicate expression + specifying selection. + selection_specs : List of specs to match on + A list of types, functions, or type[.group][.label] + strings specifying which objects to apply the + selection on. + **selection: Dictionary declaring selections by dimension + Selections can be scalar values, tuple ranges, lists + of discrete values and boolean arrays + + Returns + ------- + Returns an Dimensioned object containing the selected data + or a scalar if a single value was selected """ from ...util.transform import dim if selection_expr is not None and not isinstance(selection_expr, dim): raise ValueError("""\ -The first positional argument to the Dataset.select method is expected to be a -holoviews.util.transform.dim expression. 
Use the selection_specs keyword -argument to specify a selection specification""") + The first positional argument to the Dataset.select method is expected to be a + holoviews.util.transform.dim expression. Use the selection_specs keyword + argument to specify a selection specification""") if selection_specs is not None and not isinstance(selection_specs, (list, tuple)): selection_specs = [selection_specs] @@ -635,12 +665,16 @@ def reindex(self, kdims=None, vdims=None): Creates a new object with a reordered or reduced set of key dimensions. By default drops all non-varying key dimensions.x - Args: - kdims (optional): New list of key dimensionsx - vdims (optional): New list of value dimensions + Parameters + ---------- + kdims : optional + New list of key dimensionsx + vdims : optional + New list of value dimensions - Returns: - Reindexed object + Returns + ------- + Reindexed object """ gridded = self.interface.gridded scalars = [] @@ -677,8 +711,7 @@ def reindex(self, kdims=None, vdims=None): def __getitem__(self, slices): - """ - Allows slicing and selecting values in the Dataset object. + """Allows slicing and selecting values in the Dataset object. Supports multiple indexing modes: (1) Slicing and indexing along the values of each dimension @@ -691,6 +724,7 @@ def __getitem__(self, slices): value dimension by name. (4) A boolean array index matching the length of the Dataset object. + """ slices = core_util.process_ellipses(self, slices, vdim_selection=True) if getattr(getattr(slices, 'dtype', None), 'kind', None) == 'b': @@ -733,24 +767,29 @@ def sample(self, samples=None, bounds=None, closest=True, **kwargs): Sampling a range or grid of coordinates, e.g.: - 1D: ds.sample(3) - 2D: ds.sample((3, 3)) + 1D : ds.sample(3) + 2D : ds.sample((3, 3)) Sampling by keyword, e.g.: ds.sample(x=0) - Args: - samples: List of nd-coordinates to sample - bounds: Bounds of the region to sample - Defined as two-tuple for 1D sampling and four-tuple - for 2D sampling. - closest: Whether to snap to closest coordinates - **kwargs: Coordinates specified as keyword pairs - Keywords of dimensions and scalar coordinates - - Returns: - Element containing the sampled coordinates + Parameters + ---------- + samples : List of nd-coordinates to sample + bounds + Bounds of the region to sample + Defined as two-tuple for 1D sampling and four-tuple + for 2D sampling. + closest + Whether to snap to closest coordinates + **kwargs + Coordinates specified as keyword pairs + Keywords of dimensions and scalar coordinates + + Returns + ------- + Element containing the sampled coordinates """ if samples is None: samples = [] @@ -843,19 +882,25 @@ def reduce(self, dimensions=None, function=None, spreadfn=None, **reductions): ds.reduce(x=np.mean) - Args: - dimensions: Dimension(s) to apply reduction on - Defaults to all key dimensions - function: Reduction operation to apply, e.g. numpy.mean - spreadfn: Secondary reduction to compute value spread - Useful for computing a confidence interval, spread, or - standard deviation. - **reductions: Keyword argument defining reduction - Allows reduction to be defined as keyword pair of - dimension and function - - Returns: - The Dataset after reductions have been applied. + Parameters + ---------- + dimensions + Dimension(s) to apply reduction on + Defaults to all key dimensions + function + Reduction operation to apply, e.g. numpy.mean + spreadfn + Secondary reduction to compute value spread + Useful for computing a confidence interval, spread, or + standard deviation. 
+ **reductions + Keyword argument defining reduction + Allows reduction to be defined as keyword pair of + dimension and function + + Returns + ------- + The Dataset after reductions have been applied. """ if dimensions is None: dimensions = [] @@ -873,19 +918,25 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): function or dim_transform specified as a tuple of the transformed dimension name and dim transform. - Args: - dimensions: Dimension(s) to aggregate on - Default to all key dimensions - function: Aggregation function or transform to apply - Supports both simple functions and dimension transforms - spreadfn: Secondary reduction to compute value spread - Useful for computing a confidence interval, spread, or - standard deviation. - **kwargs: Keyword arguments either passed to the aggregation function - or to create new names for the transformed variables - - Returns: - Returns the aggregated Dataset + Parameters + ---------- + dimensions + Dimension(s) to aggregate on + Default to all key dimensions + function + Aggregation function or transform to apply + Supports both simple functions and dimension transforms + spreadfn + Secondary reduction to compute value spread + Useful for computing a confidence interval, spread, or + standard deviation. + **kwargs + Keyword arguments either passed to the aggregation function + or to create new names for the transformed variables + + Returns + ------- + Returns the aggregated Dataset """ from ...util.transform import dim if dimensions is None: dimensions = self.kdims @@ -956,16 +1007,23 @@ def groupby(self, dimensions=None, container_type=HoloMap, group_type=None, returning an object of type container_type (expected to be dictionary-like) containing the groups. - Args: - dimensions: Dimension(s) to group by - container_type: Type to cast group container to - group_type: Type to cast each group to - dynamic: Whether to return a DynamicMap - **kwargs: Keyword arguments to pass to each group - - Returns: - Returns object of supplied container_type containing the - groups. If dynamic=True returns a DynamicMap instead. + Parameters + ---------- + dimensions + Dimension(s) to group by + container_type + Type to cast group container to + group_type + Type to cast each group to + dynamic + Whether to return a DynamicMap + **kwargs + Keyword arguments to pass to each group + + Returns + ------- + Returns object of supplied container_type containing the + groups. If dynamic=True returns a DynamicMap instead. """ if dimensions is None: dimensions = [] @@ -1012,19 +1070,25 @@ def transform(self, *args, **kwargs): one in which case it will be added as an additional value dimension. - Args: - args: Specify the output arguments and transforms as a - tuple of dimension specs and dim transforms - drop (bool): Whether to drop all variables not part of the transform - keep_index (bool): Whether to keep indexes - Whether to apply transform on datastructure with - index, e.g. pandas.Series or xarray.DataArray, - (important for dask datastructures where index may - be required to align datasets). - kwargs: Specify new dimensions in the form new_dim=dim_transform - - Returns: - Transformed dataset with new dimensions + Parameters + ---------- + args + Specify the output arguments and transforms as a + tuple of dimension specs and dim transforms + drop : bool + Whether to drop all variables not part of the transform + keep_index : bool + Whether to keep indexes + Whether to apply transform on datastructure with + index, e.g. 
pandas.Series or xarray.DataArray, + (important for dask datastructures where index may + be required to align datasets). + kwargs + Specify new dimensions in the form new_dim=dim_transform + + Returns + ------- + Transformed dataset with new dimensions """ drop = kwargs.pop('drop', False) keep_index = kwargs.pop('keep_index', True) @@ -1065,34 +1129,45 @@ def transform(self, *args, **kwargs): return ds.clone(data, kdims=kdims, vdims=ds.vdims+new_dims) def __len__(self): - "Number of values in the Dataset." + """Number of values in the Dataset. + + """ return self.interface.length(self) def __bool__(self): - "Whether the Dataset contains any values" + """Whether the Dataset contains any values + + """ return self.interface.nonzero(self) @property def shape(self): - "Returns the shape of the data." + """Returns the shape of the data. + + """ return self.interface.shape(self) def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar: If false returns unique values + * Geometry: If false returns scalar values per geometry + * Gridded: If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ dim = self.get_dimension(dimension, strict=True) values = self.interface.values(self, dim, expanded, flat) @@ -1107,11 +1182,14 @@ def get_dimension_type(self, dim): Type is determined by Dimension.type attribute or common type of the dimension values, otherwise None. - Args: - dimension: Dimension to look up by name or by index + Parameters + ---------- + dimension + Dimension to look up by name or by index - Returns: - Declared type of values along the dimension + Returns + ------- + Declared type of values along the dimension """ dim_obj = self.get_dimension(dim) if dim_obj and dim_obj.type is not None: @@ -1125,12 +1203,16 @@ def dframe(self, dimensions=None, multi_index=False): Returns a pandas dataframe of columns along each dimension, either completely flat or indexed by key dimensions. - Args: - dimensions: Dimensions to return as columns - multi_index: Convert key dimensions to (multi-)index + Parameters + ---------- + dimensions + Dimensions to return as columns + multi_index + Convert key dimensions to (multi-)index - Returns: - DataFrame of columns corresponding to each dimension + Returns + ------- + DataFrame of columns corresponding to each dimension """ if dimensions is None: dimensions = [d.name for d in self.dimensions()] @@ -1148,11 +1230,14 @@ def columns(self, dimensions=None): Returns a dictionary of column arrays along each dimension of the element. 
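# A minimal sketch of the select, aggregate, transform and dimension_values
# methods documented above; the toy table is an illustrative assumption.
import numpy as np
import holoviews as hv
from holoviews import dim

ds = hv.Dataset({'x': np.arange(10), 'group': ['a', 'b'] * 5,
                 'y': np.arange(10) * 2.0},
                kdims=['x', 'group'], vdims=['y'])
evens = ds.select(selection_expr=dim('x') % 2 == 0)  # predicate-based selection
means = ds.aggregate('group', function=np.mean)      # mean y per group
scaled = ds.transform(y2=dim('y') * 2)               # add a derived dimension
values = scaled.dimension_values('y2')               # flat NumPy array of values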
- Args: - dimensions: Dimensions to return as columns + Parameters + ---------- + dimensions + Dimensions to return as columns - Returns: - Dictionary of arrays for each dimension + Returns + ------- + Dictionary of arrays for each dimension """ if dimensions is None: dimensions = self.dimensions() @@ -1163,7 +1248,9 @@ def columns(self, dimensions=None): @property def to(self): - "Returns the conversion interface with methods to convert Dataset" + """Returns the conversion interface with methods to convert Dataset + + """ return self._conversion_interface(self) @@ -1171,18 +1258,26 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): """Clones the object, overriding data and parameters. - Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - link (bool, optional): Whether clone should be linked - Determines whether Streams and Links attached to - original object will be inherited. - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + link : bool, optional + Whether clone should be linked + Determines whether Streams and Links attached to + original object will be inherited. + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned object """ if 'datatype' not in overrides: datatypes = [self.interface.datatype, *self.datatype] @@ -1224,7 +1319,7 @@ def iloc(self): Allow selection by integer index, slice and list of integer indices and boolean arrays. - Examples: + Examples : * Index the first row and column: @@ -1254,7 +1349,7 @@ def ndloc(self): be indexed with ``image.ndloc[iy, ix]``, where ``iy`` and ``ix`` are integer indices along the y and x dimensions. - Examples: + Examples : * Index value in 2D array: diff --git a/holoviews/core/data/array.py b/holoviews/core/data/array.py index 364a74794c..6af4eda4f7 100644 --- a/holoviews/core/data/array.py +++ b/holoviews/core/data/array.py @@ -229,9 +229,9 @@ def sample(cls, dataset, samples=None): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. + """ if data.shape == (1, 1): return data[0, 0] diff --git a/holoviews/core/data/cudf.py b/holoviews/core/data/cudf.py index f316e5e6e4..069d9dcb47 100644 --- a/holoviews/core/data/cudf.py +++ b/holoviews/core/data/cudf.py @@ -16,8 +16,7 @@ class cuDFInterface(PandasInterface): - """ - The cuDFInterface allows a Dataset objects to wrap a cuDF + """The cuDFInterface allows a Dataset objects to wrap a cuDF DataFrame object. Using cuDF allows working with columnar data on a GPU. Most operations leave the data in GPU memory, however to plot the data it has to be loaded into memory. @@ -29,6 +28,7 @@ class cuDFInterface(PandasInterface): (see https://github.com/rapidsai/cudf/issues/4237) 3) Not all functions can be easily applied to a cuDF so some functions applied with aggregate and reduce will not work. 
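# A hedged sketch of the cuDFInterface described above, assuming cudf (RAPIDS)
# is installed; wrapping a cuDF DataFrame keeps the data in GPU memory until
# it is needed for plotting.
import cudf
import holoviews as hv

gdf = cudf.DataFrame({'x': list(range(10)), 'y': list(range(10))})
ds = hv.Dataset(gdf, kdims=['x'], vdims=['y'])  # dispatches to cuDFInterface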
+ """ datatype = 'cuDF' @@ -183,11 +183,11 @@ def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs): @classmethod def select_mask(cls, dataset, selection): - """ - Given a Dataset object and a dictionary with dimension keys and + """Given a Dataset object and a dictionary with dimension keys and selection keys (i.e. tuple ranges, slices, sets, lists, or literals) return a boolean mask over the rows in the Dataset object that have been selected. + """ mask = None for dim, sel in selection.items(): diff --git a/holoviews/core/data/dask.py b/holoviews/core/data/dask.py index 2b021f5a54..746bc556ed 100644 --- a/holoviews/core/data/dask.py +++ b/holoviews/core/data/dask.py @@ -12,8 +12,7 @@ class DaskInterface(PandasInterface): - """ - The DaskInterface allows a Dataset objects to wrap a dask + """The DaskInterface allows a Dataset objects to wrap a dask DataFrame object. Using dask allows loading data lazily and performing out-of-core operations on the data, making it possible to work on datasets larger than memory. @@ -28,6 +27,7 @@ class DaskInterface(PandasInterface): error when supplied a non-scalar value. 4) Not all functions can be easily applied to a dask dataframe so some functions applied with aggregate and reduce will not work. + """ types = () @@ -112,11 +112,11 @@ def values(cls, dataset, dim, expanded=True, flat=True, compute=True, keep_index @classmethod def select_mask(cls, dataset, selection): - """ - Given a Dataset object and a dictionary with dimension keys and + """Given a Dataset object and a dictionary with dimension keys and selection keys (i.e. tuple ranges, slices, sets, lists. or literals) return a boolean mask over the rows in the Dataset object that have been selected. + """ select_mask = None for dim, k in selection.items(): @@ -262,9 +262,9 @@ def aggregate(cls, dataset, dimensions, function, **kwargs): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. + """ import dask.dataframe as dd if len(data.columns) > 1 or len(data) != 1: @@ -321,9 +321,9 @@ def nonzero(cls, dataset): @classmethod def iloc(cls, dataset, index): - """ - Dask does not support iloc, therefore iloc will execute + """Dask does not support iloc, therefore iloc will execute the call graph and lose the laziness of the operation. + """ rows, cols = index scalar = False diff --git a/holoviews/core/data/dictionary.py b/holoviews/core/data/dictionary.py index 9db91c7587..025546444b 100644 --- a/holoviews/core/data/dictionary.py +++ b/holoviews/core/data/dictionary.py @@ -11,10 +11,10 @@ class DictInterface(Interface): - """ - Interface for simple dictionary-based dataset format. The dictionary + """Interface for simple dictionary-based dataset format. The dictionary keys correspond to the column (i.e. dimension) names and the values are collections representing the values in that column. + """ types = (dict, OrderedDict) @@ -135,9 +135,9 @@ def validate(cls, dataset, vdims=True): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. 
+ """ if len(data) != 1: return data diff --git a/holoviews/core/data/grid.py b/holoviews/core/data/grid.py index 9fa1a5f592..62f6ff7a59 100644 --- a/holoviews/core/data/grid.py +++ b/holoviews/core/data/grid.py @@ -12,8 +12,7 @@ class GridInterface(DictInterface): - """ - Interface for simple dictionary-based dataset format using a + """Interface for simple dictionary-based dataset format using a compressed representation that uses the cartesian product between key dimensions. As with DictInterface, the dictionary keys correspond to the column (i.e. dimension) names and the values are NumPy arrays @@ -25,6 +24,7 @@ class GridInterface(DictInterface): instance, given an temperature recordings sampled regularly across the earth surface, a list of N unique latitudes and M unique longitudes can specify the position of NxM temperature samples. + """ types = (dict,) @@ -245,6 +245,7 @@ def _infer_interval_breaks(cls, coord, axis=0): >>> GridInterface._infer_interval_breaks([[0, 1], [3, 4]], axis=1) array([[-0.5, 0.5, 1.5], [ 2.5, 3.5, 4.5]]) + """ coord = np.asarray(coord) if coord.shape[axis] == 0: @@ -262,10 +263,10 @@ def _infer_interval_breaks(cls, coord, axis=0): @classmethod def coords(cls, dataset, dim, ordered=False, expanded=False, edges=False): - """ - Returns the coordinates along a dimension. Ordered ensures + """Returns the coordinates along a dimension. Ordered ensures coordinates are in ascending order and expanded creates ND-array matching the dimensionality of the dataset. + """ dim = dataset.get_dimension(dim, strict=True) irregular = cls.irregular(dataset, dim) @@ -298,8 +299,7 @@ def coords(cls, dataset, dim, ordered=False, expanded=False, edges=False): @classmethod def canonicalize(cls, dataset, data, data_coords=None, virtual_coords=None): - """ - Canonicalize takes an array of values as input and reorients + """Canonicalize takes an array of values as input and reorients and transposes it to match the canonical format expected by plotting functions. In certain cases the dimensions defined via the kdims of an Element may not match the dimensions of @@ -311,6 +311,7 @@ def canonicalize(cls, dataset, data, data_coords=None, virtual_coords=None): by some interfaces (e.g. xarray) to index irregular datasets with a virtual integer index. This ensures these coordinates are not simply dropped. + """ if virtual_coords is None: virtual_coords = [] @@ -642,8 +643,8 @@ def mask(cls, dataset, mask, mask_val=np.nan): @classmethod def sample(cls, dataset, samples=None): - """ - Samples the gridded data into dataset of samples. + """Samples the gridded data into dataset of samples. + """ if samples is None: samples = [] diff --git a/holoviews/core/data/ibis.py b/holoviews/core/data/ibis.py index 06011025b0..e59b4ca4f7 100644 --- a/holoviews/core/data/ibis.py +++ b/holoviews/core/data/ibis.py @@ -281,9 +281,9 @@ def iloc(cls, dataset, index): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. 
+ """ if IBIS_GE_4_0_0: count = data.count().execute() diff --git a/holoviews/core/data/image.py b/holoviews/core/data/image.py index 3834cb33b2..9680310e8c 100644 --- a/holoviews/core/data/image.py +++ b/holoviews/core/data/image.py @@ -12,9 +12,9 @@ class ImageInterface(GridInterface): - """ - Interface for 2 or 3D arrays representing images + """Interface for 2 or 3D arrays representing images of raw luminance values, RGB values or HSV values. + """ types = (np.ndarray,) @@ -69,7 +69,9 @@ def init(cls, eltype, data, kdims, vdims): @classmethod def irregular(cls, dataset, dim): - "ImageInterface does not support irregular data" + """ImageInterface does not support irregular data + + """ return False @classmethod @@ -165,8 +167,8 @@ def range(cls, obj, dim): def values( cls, dataset, dim, expanded=True, flat=True, compute=True, keep_index=False ): - """ - The set of samples available along a particular dimension. + """The set of samples available along a particular dimension. + """ dim_idx = dataset.get_dimension_index(dim) if dim_idx in [0, 1]: @@ -214,8 +216,8 @@ def mask(cls, dataset, mask, mask_val=np.nan): @classmethod def select(cls, dataset, selection_mask=None, **selection): - """ - Slice the underlying numpy array in sheet coordinates. + """Slice the underlying numpy array in sheet coordinates. + """ selection = {k: slice(*sel) if isinstance(sel, tuple) else sel for k, sel in selection.items()} @@ -240,13 +242,13 @@ def select(cls, dataset, selection_mask=None, **selection): @classmethod def sample(cls, dataset, samples=None): - """ - Sample the Raster along one or both of its dimensions, + """Sample the Raster along one or both of its dimensions, returning a reduced dimensionality type, which is either a ItemTable, Curve or Scatter. If two dimension samples and a new_xaxis is provided the sample will be the value of the sampled unit indexed by the value in the new_xaxis tuple. + """ if samples is None: samples = [] @@ -293,9 +295,9 @@ def groupby(cls, dataset, dim_names, container_type, group_type, **kwargs): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. + """ if np.isscalar(data) or len(data) != 1: return data diff --git a/holoviews/core/data/interface.py b/holoviews/core/data/interface.py index e1c03154c8..34d9e4ed51 100644 --- a/holoviews/core/data/interface.py +++ b/holoviews/core/data/interface.py @@ -11,7 +11,9 @@ class DataError(ValueError): - "DataError is raised when the data cannot be interpreted" + """DataError is raised when the data cannot be interpreted + + """ def __init__(self, msg, interface=None): if interface is not None: @@ -53,12 +55,12 @@ def _perform_getitem(cls, dataset, index): class iloc(Accessor): - """ - iloc is small wrapper object that allows row, column based + """iloc is small wrapper object that allows row, column based indexing into a Dataset using the ``.iloc`` property. It supports the usual numpy and pandas iloc indexing semantics including integer indices, slices, lists and arrays of values. For more information see the ``Dataset.iloc`` property docstring. + """ @classmethod @@ -99,13 +101,14 @@ def _perform_getitem(cls, dataset, index): class ndloc(Accessor): - """ - ndloc is a small wrapper object that allows ndarray-like indexing + """ndloc is a small wrapper object that allows ndarray-like indexing for gridded Datasets using the ``.ndloc`` property. 
It supports the standard NumPy ndarray indexing semantics including integer indices, slices, lists and arrays of values. For more information see the ``Dataset.ndloc`` property docstring. + """ + @classmethod def _perform_getitem(cls, dataset, indices): ds = dataset @@ -140,19 +143,19 @@ class Interface(param.Parameterized): @classmethod def loaded(cls): - """ - Indicates whether the required dependencies are loaded. + """Indicates whether the required dependencies are loaded. + """ return True @classmethod def applies(cls, obj): - """ - Indicates whether the interface is designed specifically to + """Indicates whether the interface is designed specifically to handle the supplied object's type. By default simply checks if the object is one of the types declared on the class, however if the type is expensive to import at load time the method may be overridden. + """ return type(obj) in cls.types @@ -162,10 +165,10 @@ def register(cls, interface): @classmethod def cast(cls, datasets, datatype=None, cast_type=None): - """ - Given a list of Dataset objects, cast them to the specified + """Given a list of Dataset objects, cast them to the specified datatype (by default the format matching the current interface) with the given cast_type (if specified). + """ datatype = datatype or cls.datatype cast = [] @@ -282,15 +285,15 @@ def validate(cls, dataset, vdims=True): @classmethod def persist(cls, dataset): - """ - Should return a persisted version of the Dataset. + """Should return a persisted version of the Dataset. + """ return dataset @classmethod def compute(cls, dataset): - """ - Should return a computed version of the Dataset. + """Should return a computed version of the Dataset. + """ return dataset @@ -304,9 +307,9 @@ def isscalar(cls, dataset, dim): @classmethod def isunique(cls, dataset, dim, per_geom=False): - """ - Compatibility method introduced for v1.13.0 to smooth + """Compatibility method introduced for v1.13.0 to smooth over addition of per_geom kwarg for isscalar method. + """ try: return cls.isscalar(dataset, dim, per_geom) @@ -324,8 +327,8 @@ def dtype(cls, dataset, dimension): @classmethod def replace_value(cls, data, nodata): - """ - Replace `nodata` value in data with NaN + """Replace `nodata` value in data with NaN + """ data = data.astype('float64') mask = data != nodata @@ -335,11 +338,11 @@ def replace_value(cls, data, nodata): @classmethod def select_mask(cls, dataset, selection): - """ - Given a Dataset object and a dictionary with dimension keys and + """Given a Dataset object and a dictionary with dimension keys and selection keys (i.e. tuple ranges, slices, sets, lists, or literals) return a boolean mask over the rows in the Dataset object that have been selected. + """ mask = np.ones(len(dataset), dtype=np.bool_) for dim, sel in selection.items(): @@ -395,9 +398,9 @@ def _select_mask_neighbor(cls, dataset, selection): @classmethod def indexed(cls, dataset, selection): - """ - Given a Dataset object and selection to be applied returns + """Given a Dataset object and selection to be applied returns boolean to indicate whether a scalar value has been indexed. + """ selected = list(selection.keys()) all_scalar = all((not isinstance(sel, (tuple, slice, set, list)) @@ -427,8 +430,8 @@ def range(cls, dataset, dimension): @classmethod def concatenate(cls, datasets, datatype=None, new_type=None): - """ - Utility function to concatenate an NdMapping of Dataset objects. + """Utility function to concatenate an NdMapping of Dataset objects. + """ from . 
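For comparison with iloc, ndloc indexes gridded data by array position rather than by coordinate value. A hedged sketch using a random Image:

    import numpy as np
    import holoviews as hv

    img = hv.Image(np.random.rand(10, 10))
    img.ndloc[0:3, 0:3]      # a 3x3 block of samples selected by integer position
    img.ndloc[[0, 2, 4], :]  # rows 0, 2 and 4 across all columns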
import Dataset, default_datatype new_type = new_type or Dataset @@ -524,8 +527,8 @@ def holes(cls, dataset): @classmethod def as_dframe(cls, dataset): - """ - Returns the data of a Dataset as a dataframe avoiding copying + """Returns the data of a Dataset as a dataframe avoiding copying if it already a dataframe type. + """ return dataset.dframe() diff --git a/holoviews/core/data/multipath.py b/holoviews/core/data/multipath.py index 354a8728bc..2b266e8c08 100644 --- a/holoviews/core/data/multipath.py +++ b/holoviews/core/data/multipath.py @@ -8,8 +8,7 @@ class MultiInterface(Interface): - """ - MultiInterface allows wrapping around a list of tabular datasets + """MultiInterface allows wrapping around a list of tabular datasets including dataframes, the columnar dictionary format or 2D tabular NumPy arrays. Using the split method the list of tabular data can be split into individual datasets. @@ -17,6 +16,7 @@ class MultiInterface(Interface): The interface makes the data appear a list of tabular datasets as a single dataset. The interface may be used to represent geometries so the behavior depends on the type of geometry being represented. + """ types = () @@ -121,9 +121,9 @@ def geom_type(cls, dataset): @classmethod def _inner_dataset_template(cls, dataset, validate_vdims=True): - """ - Returns a Dataset template used as a wrapper around the data + """Returns a Dataset template used as a wrapper around the data contained within the multi-interface dataset. + """ from . import Dataset vdims = dataset.vdims if getattr(dataset, 'level', None) is None else [] @@ -193,8 +193,8 @@ def holes(cls, dataset): @classmethod def isscalar(cls, dataset, dim, per_geom=False): - """ - Tests if dimension is scalar in each subpath. + """Tests if dimension is scalar in each subpath. + """ if not dataset.data: return True @@ -218,8 +218,8 @@ def isscalar(cls, dataset, dim, per_geom=False): @classmethod def select(cls, dataset, selection_mask=None, **selection): - """ - Applies selectiong on all the subpaths. + """Applies selectiong on all the subpaths. + """ from ...element import Polygons if not dataset.data: @@ -245,8 +245,8 @@ def select(cls, dataset, selection_mask=None, **selection): @classmethod def select_paths(cls, dataset, index): - """ - Allows selecting paths with usual NumPy slicing index. + """Allows selecting paths with usual NumPy slicing index. + """ selection = np.array([{0: p} for p in dataset.data])[index] if isinstance(selection, dict): @@ -307,9 +307,9 @@ def sample(cls, dataset, samples=None): @classmethod def shape(cls, dataset): - """ - Returns the shape of all subpaths, making it appear like a + """Returns the shape of all subpaths, making it appear like a single array of concatenated subpaths separated by NaN values. + """ if not dataset.data: return (0, len(dataset.dimensions())) @@ -326,10 +326,10 @@ def shape(cls, dataset): @classmethod def length(cls, dataset): - """ - Returns the length of the multi-tabular dataset making it appear + """Returns the length of the multi-tabular dataset making it appear like a single array of concatenated subpaths separated by NaN values. + """ if not dataset.data: return 0 @@ -388,10 +388,10 @@ def redim(cls, dataset, dimensions): @classmethod def values(cls, dataset, dimension, expanded=True, flat=True, compute=True, keep_index=False): - """ - Returns a single concatenated array of all subpaths separated + """Returns a single concatenated array of all subpaths separated by NaN values. If expanded keyword is False an array of arrays is returned. 
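To make the multi-interface behaviour above concrete, a sketch of a list-of-dictionaries Path and its split method; the coordinates and the 'value' dimension are invented for illustration:

    import holoviews as hv

    paths = hv.Path([
        {'x': [0, 1, 2], 'y': [0, 1, 0], 'value': 0},
        {'x': [0, 1, 2], 'y': [1, 2, 1], 'value': 1},
    ], vdims='value')

    paths.split()   # list of single-geometry Path elements
    len(paths)      # length as if subpaths were concatenated with NaN separators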
+ """ if not dataset.data: return np.array([]) @@ -440,9 +440,9 @@ def values(cls, dataset, dimension, expanded=True, flat=True, @classmethod def split(cls, dataset, start, end, datatype, **kwargs): - """ - Splits a multi-interface Dataset into regular Datasets using + """Splits a multi-interface Dataset into regular Datasets using regular tabular interfaces. + """ objs = [] if datatype is None: @@ -556,12 +556,17 @@ def ensure_ring(geom, values=None): length) then the insertion will occur on the values instead, ensuring that they will match the ring geometry. - Args: - geom: 2-D array of geometry coordinates - values: Optional array of values + Parameters + ---------- + geom + 2-D array of geometry coordinates + values + Optional array of values + + Returns + ------- + Array where values have been inserted and ring closing indexes - Returns: - Array where values have been inserted and ring closing indexes """ if values is None: values = geom diff --git a/holoviews/core/data/pandas.py b/holoviews/core/data/pandas.py index dd7bad5e85..7a30a6284e 100644 --- a/holoviews/core/data/pandas.py +++ b/holoviews/core/data/pandas.py @@ -12,8 +12,7 @@ class PandasAPI: - """ - This class is used to describe the interface as having a pandas-like API. + """This class is used to describe the interface as having a pandas-like API. The reason to have this class is that it is not always possible to directly inherit from the PandasInterface. @@ -21,6 +20,7 @@ class PandasAPI: This class should not have any logic as it should be used like: if issubclass(interface, PandasAPI): ... + """ @@ -312,9 +312,9 @@ def aggregate(cls, dataset, dimensions, function, **kwargs): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. + """ if len(data) != 1 or len(data.columns) > 1: return data @@ -480,9 +480,9 @@ def assign(cls, dataset, new_data): @classmethod def as_dframe(cls, dataset): - """ - Returns the data of a Dataset as a dataframe avoiding copying + """Returns the data of a Dataset as a dataframe avoiding copying if it already a dataframe type. + """ if issubclass(dataset.interface, PandasInterface): if any(cls.isindex(dataset, dim) for dim in dataset.dimensions()): diff --git a/holoviews/core/data/spatialpandas.py b/holoviews/core/data/spatialpandas.py index 5a8af8b8cd..c081a94a77 100644 --- a/holoviews/core/data/spatialpandas.py +++ b/holoviews/core/data/spatialpandas.py @@ -199,8 +199,8 @@ def dimension_type(cls, dataset, dim): @classmethod def isscalar(cls, dataset, dim, per_geom=False): - """ - Tests if dimension is scalar in each subpath. + """Tests if dimension is scalar in each subpath. + """ dim = dataset.get_dimension(dim) if (dim in cls.geom_dims(dataset)): @@ -457,12 +457,16 @@ def as_dframe(cls, dataset): def get_geom_type(gdf, col): """Return the HoloViews geometry type string for the geometry column. - Args: - gdf: The GeoDataFrame to get the geometry from - col: The geometry column + Parameters + ---------- + gdf + The GeoDataFrame to get the geometry from + col + The geometry column - Returns: - A string representing the type of geometry + Returns + ------- + A string representing the type of geometry """ from spatialpandas.geometry import ( LineDtype, @@ -488,13 +492,17 @@ def get_geom_type(gdf, col): def geom_to_array(geom, index=None, multi=False, geom_type=None): """Converts spatialpandas geometry to an array. 
- Args: - geom: spatialpandas geometry - index: The column index to return - multi: Whether to concatenate multiple arrays or not - - Returns: - Array or list of arrays. + Parameters + ---------- + geom : spatialpandas geometry + index + The column index to return + multi + Whether to concatenate multiple arrays or not + + Returns + ------- + Array or list of arrays. """ from spatialpandas.geometry import ( Line, @@ -542,12 +550,15 @@ def geom_to_array(geom, index=None, multi=False, geom_type=None): def geom_array_to_array(geom_array, index, expand=False, geom_type=None): """Converts spatialpandas extension arrays to a flattened array. - Args: - geom: spatialpandas geometry - index: The column index to return + Parameters + ---------- + geom : spatialpandas geometry + index + The column index to return - Returns: - Flattened array + Returns + ------- + Flattened array """ from spatialpandas.geometry import MultiPointArray, PointArray if isinstance(geom_array, PointArray): @@ -604,17 +615,25 @@ def get_value_array(data, dimension, expanded, keep_index, geom_col, is_points, geom_length=geom_length): """Returns an array of values from a GeoDataFrame. - Args: - data: GeoDataFrame - dimension: The dimension to get the values from - expanded: Whether to expand the value array - keep_index: Whether to return a Series - geom_col: The column in the data that contains the geometries - is_points: Whether the geometries are points - geom_length: The function used to compute the length of each geometry - - Returns: - An array containing the values along a dimension + Parameters + ---------- + data : GeoDataFrame + dimension + The dimension to get the values from + expanded + Whether to expand the value array + keep_index + Whether to return a Series + geom_col + The column in the data that contains the geometries + is_points + Whether the geometries are points + geom_length + The function used to compute the length of each geometry + + Returns + ------- + An array containing the values along a dimension """ if not len(data): return np.array([]) @@ -658,11 +677,13 @@ def get_value_array(data, dimension, expanded, keep_index, geom_col, def geom_to_holes(geom): """Extracts holes from spatialpandas Polygon geometries. - Args: - geom: spatialpandas geometry + Parameters + ---------- + geom : spatialpandas geometry - Returns: - List of arrays representing holes + Returns + ------- + List of arrays representing holes """ from spatialpandas.geometry import MultiPolygon, Polygon if isinstance(geom, Polygon): @@ -693,15 +714,20 @@ def geom_to_holes(geom): def to_spatialpandas(data, xdim, ydim, columns=None, geom='point'): """Converts list of dictionary format geometries to spatialpandas line geometries. 
- Args: - data: List of dictionaries representing individual geometries - xdim: Name of x-coordinates column - ydim: Name of y-coordinates column - columns: List of columns to add - geom: The type of geometry - - Returns: - A spatialpandas.GeoDataFrame version of the data + Parameters + ---------- + data : List of dictionaries representing individual geometries + xdim + Name of x-coordinates column + ydim + Name of y-coordinates column + columns : List of columns to add + geom + The type of geometry + + Returns + ------- + A spatialpandas.GeoDataFrame version of the data """ from spatialpandas import GeoDataFrame, GeoSeries from spatialpandas.geometry import ( @@ -824,14 +850,20 @@ def to_spatialpandas(data, xdim, ydim, columns=None, geom='point'): def to_geom_dict(eltype, data, kdims, vdims, interface=None): """Converts data from any list format to a dictionary based format. - Args: - eltype: Element type to convert - data: The original data - kdims: The declared key dimensions - vdims: The declared value dimensions - - Returns: - A list of dictionaries containing geometry coordinates and values. + Parameters + ---------- + eltype + Element type to convert + data + The original data + kdims + The declared key dimensions + vdims + The declared value dimensions + + Returns + ------- + A list of dictionaries containing geometry coordinates and values. """ from . import Dataset @@ -856,14 +888,20 @@ def to_geom_dict(eltype, data, kdims, vdims, interface=None): def from_multi(eltype, data, kdims, vdims): """Converts list formats into spatialpandas.GeoDataFrame. - Args: - eltype: Element type to convert - data: The original data - kdims: The declared key dimensions - vdims: The declared value dimensions - - Returns: - A GeoDataFrame containing in the list based format. + Parameters + ---------- + eltype + Element type to convert + data + The original data + kdims + The declared key dimensions + vdims + The declared value dimensions + + Returns + ------- + A GeoDataFrame containing in the list based format. """ from spatialpandas import GeoDataFrame @@ -895,14 +933,16 @@ def from_multi(eltype, data, kdims, vdims): def from_shapely(data): """Converts shapely based data formats to spatialpandas.GeoDataFrame. - Args: - data: A list of shapely objects or dictionaries containing - shapely objects + Parameters + ---------- + data + A list of shapely objects or dictionaries containing + shapely objects - Returns: - A GeoDataFrame containing the shapely geometry data. + Returns + ------- + A GeoDataFrame containing the shapely geometry data. """ - from shapely.geometry.base import BaseGeometry from spatialpandas import GeoDataFrame, GeoSeries @@ -932,6 +972,7 @@ def _asarray(v): Reason why it is not located in holoviews.core.util is that there is a already a function called `asarray`. 
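A hedged sketch of the kind of conversion from_shapely performs, using only public shapely attributes; the polygon and the dictionary keys are illustrative:

    import numpy as np
    from shapely.geometry import Polygon

    poly = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
    coords = np.array(poly.exterior.coords)   # (N, 2) array, ring already closed
    geom_dict = {'x': coords[:, 0], 'y': coords[:, 1]}
    # geom_dict can then be passed to an element, e.g. hv.Polygons([geom_dict])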
+ """ try: return np.asarray(v) diff --git a/holoviews/core/data/util.py b/holoviews/core/data/util.py index 15b9718fe5..268c53be30 100644 --- a/holoviews/core/data/util.py +++ b/holoviews/core/data/util.py @@ -56,8 +56,8 @@ def is_dask(array): return da and isinstance(array, da.Array) def cached(method): - """ - Decorates an Interface method and using a cached version + """Decorates an Interface method and using a cached version + """ def cached(*args, **kwargs): cache = args[1]._cached diff --git a/holoviews/core/data/xarray.py b/holoviews/core/data/xarray.py index 38d25f9649..42fafa954b 100644 --- a/holoviews/core/data/xarray.py +++ b/holoviews/core/data/xarray.py @@ -442,8 +442,7 @@ def aggregate(cls, dataset, dimensions, function, **kwargs): @classmethod def unpack_scalar(cls, dataset, data): - """ - Given a dataset object and data in the appropriate format for + """Given a dataset object and data in the appropriate format for the interface, return a simple scalar. """ if cls.packed(dataset): diff --git a/holoviews/core/decollate.py b/holoviews/core/decollate.py index 96da93bca5..c0a28c7c3f 100644 --- a/holoviews/core/decollate.py +++ b/holoviews/core/decollate.py @@ -25,32 +25,34 @@ def to_expr_extract_streams( hvobj, kdims, streams, original_streams, stream_mapping, container_key=None ): - """ - Build a HoloViewsExpr expression tree from a potentially nested dynamic + """Build a HoloViewsExpr expression tree from a potentially nested dynamic HoloViews object, extracting the streams and replacing them with StreamIndex objects. This function is recursive an assumes that initialize_dynamic has already been called on the input object. - Args: - hvobj: Element or DynamicMap or Layout - Potentially dynamic HoloViews object to represent as a HoloviewsExpr - kdims: list of Dimensions - List that DynamicMap key-dimension objects should be added to - streams: list of Stream - List that cloned extracted streams should be added to - original_streams: list of Stream - List that original extracted streams should be added to - stream_mapping: dict - dict to be populated with mappings from container keys to extracted Stream - objects, as described by the Callable parameter of the same name. - container_key: int or tuple - key into parent container that is associated to hvobj, or None if hvobj is - not in a container - Returns: - HoloviewsExpr expression representing hvobj if hvobj is dynamic. Otherwise, - return hvobj itself + Parameters + ---------- + hvobj : Element or DynamicMap or Layout + Potentially dynamic HoloViews object to represent as a HoloviewsExpr + kdims : list of Dimensions + List that DynamicMap key-dimension objects should be added to + streams : list of Stream + List that cloned extracted streams should be added to + original_streams : list of Stream + List that original extracted streams should be added to + stream_mapping : dict + dict to be populated with mappings from container keys to extracted Stream + objects, as described by the Callable parameter of the same name. + container_key : int or tuple + key into parent container that is associated to hvobj, or None if hvobj is + not in a container + + Returns + ------- + HoloviewsExpr expression representing hvobj if hvobj is dynamic. 
Otherwise, + return hvobj itself """ if isinstance(hvobj, DynamicMap): args = [] @@ -222,16 +224,17 @@ def expr_fn(*args): def decollate(hvobj): - """ - Decollate transforms a potentially nested dynamic HoloViews object into single + """Decollate transforms a potentially nested dynamic HoloViews object into single DynamicMap that returns a non-dynamic HoloViews object. All nested streams in the input object are copied and attached to the resulting DynamicMap. - Args: - hvobj: Holoviews object + Parameters + ---------- + hvobj : Holoviews object - Returns: - DynamicMap + Returns + ------- + DynamicMap """ kdims = [] original_streams = [] diff --git a/holoviews/core/dimension.py b/holoviews/core/dimension.py index a9b2c91b2a..431c4d7e36 100644 --- a/holoviews/core/dimension.py +++ b/holoviews/core/dimension.py @@ -1,7 +1,7 @@ -""" -Provides Dimension objects for tracking the properties of a value, +"""Provides Dimension objects for tracking the properties of a value, axis or map dimension. Also supplies the Dimensioned abstract baseclass for classes that accept Dimension values. + """ from __future__ import annotations @@ -35,11 +35,12 @@ redim = Redim # pickle compatibility - remove in 2.0 def param_aliases(d): - """ - Called from __setstate__ in LabelledData in order to load + """Called from __setstate__ in LabelledData in order to load old pickles with outdated parameter names. - Warning: We want to keep pickle hacking to a minimum! + Warning + ------- + We want to keep pickle hacking to a minimum! """ for old, new in ALIASES.items(): old_param = f'_{old}_param_value' @@ -52,12 +53,14 @@ def param_aliases(d): def asdim(dimension): """Convert the input to a Dimension. - Args: - dimension: tuple, dict or string type to convert to Dimension + Parameters + ---------- + dimension : tuple, dict or string type to convert to Dimension - Returns: - A Dimension object constructed from the dimension spec. No - copy is performed if the input is already a Dimension. + Returns + ------- + A Dimension object constructed from the dimension spec. No + copy is performed if the input is already a Dimension. """ return dimension if isinstance(dimension, Dimension) else Dimension(dimension) @@ -65,12 +68,14 @@ def asdim(dimension): def dimension_name(dimension): """Return the Dimension.name for a dimension-like object. - Args: - dimension: Dimension or dimension string, tuple or dict + Parameters + ---------- + dimension : Dimension or dimension string, tuple or dict - Returns: - The name of the Dimension or what would be the name if the - input as converted to a Dimension. + Returns + ------- + The name of the Dimension or what would be the name if the + input as converted to a Dimension. """ if isinstance(dimension, Dimension): return dimension.name @@ -91,16 +96,17 @@ def dimension_name(dimension): def process_dimensions(kdims, vdims): """Converts kdims and vdims to Dimension objects. - Args: - kdims: List or single key dimension(s) specified as strings, - tuples dicts or Dimension objects. - vdims: List or single value dimension(s) specified as strings, - tuples dicts or Dimension objects. - - Returns: - Dictionary containing kdims and vdims converted to Dimension - objects: - + Parameters + ---------- + kdims : List or single key dimension(s) specified as strings, + tuples dicts or Dimension objects. + vdims : List or single value dimension(s) specified as strings, + tuples dicts or Dimension objects. 
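A short sketch of how asdim and dimension_name normalise dimension specs; the dimension names are arbitrary:

    from holoviews.core.dimension import asdim, dimension_name

    asdim('height')                    # Dimension('height')
    asdim(('height', 'Height (cm)'))   # name plus label in a single spec
    dimension_name(asdim('height'))    # 'height'
    dimension_name('height')           # plain strings pass through unchanged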
+ + Returns + ------- + Dictionary containing kdims and vdims converted to Dimension + objects {'kdims': [Dimension('x')], 'vdims': [Dimension('y')] """ dimensions = {} @@ -122,8 +128,7 @@ def process_dimensions(kdims, vdims): class Dimension(param.Parameterized): - """ - Dimension objects are used to specify some important general + """Dimension objects are used to specify some important general features that may be associated with a collection of values. For instance, a Dimension may specify that a set of numeric values @@ -165,6 +170,7 @@ class Dimension(param.Parameterized): name of the unit and whether or not it is cyclic. The name of the unit is used as part of the pretty-printed representation and knowing whether it is cyclic is important for certain operations. + """ name = param.String(doc=""" @@ -228,8 +234,8 @@ class Dimension(param.Parameterized): # (name, unit) to a preset Dimension object def __init__(self, spec, **params): - """ - Initializes the Dimension object with the given name. + """Initializes the Dimension object with the given name. + """ if 'name' in params: raise KeyError('Dimension name must only be passed as the positional argument') @@ -299,8 +305,9 @@ def __init__(self, spec, **params): def spec(self): """"Returns the Dimensions tuple specification - Returns: - tuple: Dimension tuple specification + Returns + ------- + tuple : Dimension tuple specification """ return (self.name, self.label) @@ -310,12 +317,15 @@ def clone(self, spec=None, **overrides): Derive a new Dimension that inherits existing parameters except for the supplied, explicit overrides - Args: - spec (tuple, optional): Dimension tuple specification - **overrides: Dimension parameter overrides + Parameters + ---------- + spec : tuple, optional + Dimension tuple specification + **overrides: Dimension parameter overrides - Returns: - Cloned Dimension object + Returns + ------- + Cloned Dimension object """ settings = dict(self.param.values(), **overrides) @@ -338,16 +348,17 @@ def __hash__(self): return hash(self.spec) def __setstate__(self, d): - """ - Compatibility for pickles before alias attribute was introduced. + """Compatibility for pickles before alias attribute was introduced. + """ super().__setstate__(d) if '_label_param_value' not in d: self.label = self.name def __eq__(self, other): - "Implements equals operator including sanitized comparison." + """Implements equals operator including sanitized comparison. + """ if isinstance(other, Dimension): return self.label == other.label @@ -355,11 +366,15 @@ def __eq__(self, other): return other in [self.name, self.label, util.dimension_sanitizer(self.name)] def __ne__(self, other): - "Implements not equal operator including sanitized comparison." + """Implements not equal operator including sanitized comparison. 
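To ground the Dimension parameters listed above, a small usage sketch; the specific name, unit and range are illustrative:

    import holoviews as hv

    length = hv.Dimension('length', label='Length', unit='m', range=(0, 10))
    length.spec                   # ('length', 'Length')
    length.pprint_value(3.14159)  # value formatted according to value_format/type
    length.clone(unit='cm')       # new Dimension with a single parameter overridden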
+ + """ return not self.__eq__(other) def __lt__(self, other): - "Dimensions are sorted alphanumerically by name" + """Dimensions are sorted alphanumerically by name + + """ return self.name < other.name if isinstance(other, Dimension) else self.name < other def __str__(self): @@ -370,7 +385,9 @@ def __repr__(self): @property def pprint_label(self): - "The pretty-printed label string for the Dimension" + """The pretty-printed label string for the Dimension + + """ unit = ('' if self.unit is None else type(self.unit)(self.unit_format).format(unit=self.unit)) return bytes_to_unicode(self.label) + bytes_to_unicode(unit) @@ -392,11 +409,14 @@ def pprint(self): def pprint_value(self, value, print_unit=False): """Applies the applicable formatter to the value. - Args: - value: Dimension value to format + Parameters + ---------- + value + Dimension value to format - Returns: - Formatted dimension value + Returns + ------- + Formatted dimension value """ own_type = type(value) if self.type is None else self.type formatter = (self.value_format if self.value_format @@ -423,11 +443,14 @@ def pprint_value(self, value, print_unit=False): def pprint_value_string(self, value): """Pretty print the dimension value and unit with title_format - Args: - value: Dimension value to format + Parameters + ---------- + value + Dimension value to format - Returns: - Formatted dimension value string with unit + Returns + ------- + Formatted dimension value string with unit """ unit = '' if self.unit is None else ' ' + bytes_to_unicode(self.unit) value = self.pprint_value(value) @@ -435,8 +458,7 @@ def pprint_value_string(self, value): class LabelledData(param.Parameterized): - """ - LabelledData is a mix-in class designed to introduce the group and + """LabelledData is a mix-in class designed to introduce the group and label parameters (and corresponding methods) to any class containing data. This class assumes that the core data contents will be held in the attribute called 'data'. @@ -451,7 +473,9 @@ class LabelledData(param.Parameterized): [group='Height', label='Children'] and another may use [group='Height', label='Adults']. - Note: Another level of specification is implicit in the type (i.e + Note + ---- + Another level of specification is implicit in the type (i.e class) of the LabelledData object. A full specification of a LabelledData object is therefore given by the tuple (, , label>). This additional level of specification is @@ -464,6 +488,7 @@ class LabelledData(param.Parameterized): Otherwise the strings provided will be sanitized to be valid capitalized Python identifiers, which works fine but can sometimes be confusing. + """ group = param.String(default='LabelledData', constant=True, doc=""" @@ -478,8 +503,7 @@ class LabelledData(param.Parameterized): _deep_indexable = False def __init__(self, data, id=None, plot_id=None, **params): - """ - All LabelledData subclasses must supply data to the + """All LabelledData subclasses must supply data to the constructor, which will be held on the .data attribute. This class also has an id instance attribute, which may be set to associate some custom options with the object. @@ -527,18 +551,26 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): """Clones the object, overriding data and parameters. 
- Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - link (bool, optional): Whether clone should be linked - Determines whether Streams and Links attached to - original object will be inherited. - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + link : bool, optional + Whether clone should be linked + Determines whether Streams and Links attached to + original object will be inherited. + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned object """ params = self.param.values() if new_type is None: @@ -569,15 +601,20 @@ def relabel(self, label=None, group=None, depth=0): Applies relabeling to children up to the supplied depth. - Args: - label (str, optional): New label to apply to returned object - group (str, optional): New group to apply to returned object - depth (int, optional): Depth to which relabel will be applied - If applied to container allows applying relabeling to - contained objects up to the specified depth - - Returns: - Returns relabelled object + Parameters + ---------- + label : str, optional + New label to apply to returned object + group : str, optional + New group to apply to returned object + depth : int, optional + Depth to which relabel will be applied + If applied to container allows applying relabeling to + contained objects up to the specified depth + + Returns + ------- + Returns relabelled object """ new_data = self.data if (depth > 0) and getattr(self, '_deep_indexable', False): @@ -593,16 +630,19 @@ def relabel(self, label=None, group=None, depth=0): def matches(self, spec): """Whether the spec applies to this object. - Args: - spec: A function, spec or type to check for a match - * A 'type[[.group].label]' string which is compared - against the type, group and label of this object - * A function which is given the object and returns - a boolean. - * An object type matched using isinstance. - - Returns: - bool: Whether the spec matched this object. + Parameters + ---------- + spec : A function, spec or type to check for a match + * A 'type[[.group].label]' string which is compared + against the type, group and label of this object + * A function which is given the object and returns + a boolean. + * An object type matched using isinstance. + + Returns + ------- + bool + Whether the spec matched this object. """ if callable(spec) and not isinstance(spec, type): return spec(self) elif isinstance(spec, type): return isinstance(self, spec) @@ -626,17 +666,23 @@ def traverse(self, fn=None, specs=None, full_breadth=True): Traverses the set of children of the object, collecting the all objects matching the defined specs. Each object can be processed with the supplied function. - Args: - fn (function, optional): Function applied to matched objects - specs: List of specs to match - Specs must be types, functions or type[.group][.label] - specs to select objects to return, by default applies - to all objects. - full_breadth: Whether to traverse all objects - Whether to traverse the full set of objects on each - container or only the first. 
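The relabel and matches behaviour above can be sketched as follows; the element and the spec strings are illustrative:

    import holoviews as hv

    curve = hv.Curve([(0, 0), (1, 1)], group='Height', label='Children')
    curve.relabel('Adults').label             # 'Adults'
    curve.matches('Curve.Height.Children')    # True: type, group and label match
    curve.matches(hv.Curve)                   # True: plain isinstance check
    curve.matches(lambda obj: obj.label == 'Adults')   # False: custom predicate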
- Returns: - list: List of objects that matched + + Parameters + ---------- + fn : function, optional + Function applied to matched objects + specs : List of specs to match + Specs must be types, functions or type[.group][.label] + specs to select objects to return, by default applies + to all objects. + full_breadth : Whether to traverse all objects + Whether to traverse the full set of objects on each + container or only the first. + + Returns + ------- + list + List of objects that matched """ if fn is None: fn = lambda x: x @@ -670,16 +716,19 @@ def map(self, map_fn, specs=None, clone=True): dmap.map(fn, hv.Curve) - Args: - map_fn: Function to apply to each object - specs: List of specs to match - List of types, functions or type[.group][.label] specs - to select objects to return, by default applies to all - objects. - clone: Whether to clone the object or transform inplace - - Returns: - Returns the object after the map_fn has been applied + Parameters + ---------- + map_fn : Function to apply to each object + specs : List of specs to match + List of types, functions or type[.group][.label] specs + to select objects to return, by default applies to all + objects. + clone + Whether to clone the object or transform inplace + + Returns + ------- + Returns the object after the map_fn has been applied """ if specs is not None and not isinstance(specs, (list, set, tuple)): specs = [specs] @@ -698,7 +747,9 @@ def map(self, map_fn, specs=None, clone=True): def __getstate__(self): - "Ensures pickles save options applied to this objects." + """Ensures pickles save options applied to this objects. + + """ obj_dict = self.__dict__.copy() try: if Store.save_option_state and (obj_dict.get('_id', None) is not None): @@ -715,7 +766,9 @@ def __getstate__(self): def __setstate__(self, d): - "Restores options applied to this object." + """Restores options applied to this object. + + """ d = param_aliases(d) load_options = Store.load_counter_offset is not None @@ -742,46 +795,48 @@ def __setstate__(self, d): class Dimensioned(LabelledData): - """ - Dimensioned is a base class that allows the data contents of a + """Dimensioned is a base class that allows the data contents of a class to be associated with dimensions. The contents associated with dimensions may be partitioned into one of three types - * key dimensions: These are the dimensions that can be indexed via - the __getitem__ method. Dimension objects - supporting key dimensions must support indexing - over these dimensions and may also support - slicing. This list ordering of dimensions - describes the positional components of each - multi-dimensional indexing operation. - - For instance, if the key dimension names are - 'weight' followed by 'height' for Dimensioned - object 'obj', then obj[80,175] indexes a weight - of 80 and height of 175. - - Accessed using either kdims. - - * value dimensions: These dimensions correspond to any data held - on the Dimensioned object not in the key - dimensions. Indexing by value dimension is - supported by dimension name (when there are - multiple possible value dimensions); no - slicing semantics is supported and all the - data associated with that dimension will be - returned at once. Note that it is not possible - to mix value dimensions and deep dimensions. - - Accessed using either vdims. - - * deep dimensions: These are dynamically computed dimensions that - belong to other Dimensioned objects that are - nested in the data. Objects that support this - should enable the _deep_indexable flag. 
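A minimal sketch of traverse and map on a small Layout, assuming illustrative labels:

    import holoviews as hv

    layout = hv.Curve([1, 2, 3], label='A') + hv.Scatter([3, 2, 1], label='B')
    layout.traverse(lambda obj: obj.label, [hv.Curve])     # ['A']
    relabelled = layout.map(lambda c: c.relabel('C'), hv.Curve)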
Note - that it is not possible to mix value dimensions - and deep dimensions. - - Accessed using either ddims. + * key dimensions + These are the dimensions that can be indexed via + the __getitem__ method. Dimension objects + supporting key dimensions must support indexing + over these dimensions and may also support + slicing. This list ordering of dimensions + describes the positional components of each + multi-dimensional indexing operation. + + For instance, if the key dimension names are + 'weight' followed by 'height' for Dimensioned + object 'obj', then obj[80,175] indexes a weight + of 80 and height of 175. + + Accessed using either kdims. + + * value dimensions + These dimensions correspond to any data held + on the Dimensioned object not in the key + dimensions. Indexing by value dimension is + supported by dimension name (when there are + multiple possible value dimensions); no + slicing semantics is supported and all the + data associated with that dimension will be + returned at once. Note that it is not possible + to mix value dimensions and deep dimensions. + + Accessed using either vdims. + + * deep dimensions + These are dynamically computed dimensions that + belong to other Dimensioned objects that are + nested in the data. Objects that support this + should enable the _deep_indexable flag. Note + that it is not possible to mix value dimensions + and deep dimensions. + + Accessed using either ddims. Dimensioned class support generalized methods for finding the range and type of values along a particular Dimension. The range @@ -791,6 +846,7 @@ class to be associated with dimensions. The contents associated The index of an arbitrary dimension is its positional index in the list of all dimensions, starting with the key dimensions, followed by the value dimensions and ending with the deep dimensions. + """ cdims = param.Dict(default={}, doc=""" @@ -851,7 +907,9 @@ def redim(self): def _valid_dimensions(self, dimensions): """Validates key dimension input - Returns kdims if no dimensions are specified""" + Returns kdims if no dimensions are specified + + """ if dimensions is None: dimensions = self.kdims elif not isinstance(dimensions, list): @@ -868,7 +926,9 @@ def _valid_dimensions(self, dimensions): @property def ddims(self): - "The list of deep dimensions" + """The list of deep dimensions + + """ if self._deep_indexable and self: return self.values()[0].dimensions() else: @@ -883,16 +943,18 @@ def dimensions(self, selection='all', label=False): or 'value' dimensions. By default 'all' dimensions are returned. - Args: - selection: Type of dimensions to return - The type of dimension, i.e. one of 'key', 'value', - 'constant' or 'all'. - label: Whether to return the name, label or Dimension - Whether to return the Dimension objects (False), - the Dimension names (True/'name') or labels ('label'). - - Returns: - List of Dimension objects or their names or labels + Parameters + ---------- + selection : Type of dimensions to return + The type of dimension, i.e. one of 'key', 'value', + 'constant' or 'all'. + label : Whether to return the name, label or Dimension + Whether to return the Dimension objects (False), + the Dimension names (True/'name') or labels ('label'). 
+ + Returns + ------- + List of Dimension objects or their names or labels """ if label in ['name', True]: label = 'short' @@ -927,13 +989,17 @@ def dimensions(self, selection='all', label=False): def get_dimension(self, dimension, default=None, strict=False) -> Dimension | None: """Get a Dimension object by name or index. - Args: - dimension: Dimension to look up by name or integer index - default (optional): Value returned if Dimension not found - strict (bool, optional): Raise a KeyError if not found - - Returns: - Dimension object for the requested dimension or default + Parameters + ---------- + dimension : Dimension to look up by name or integer index + default : optional + Value returned if Dimension not found + strict : bool, optional + Raise a KeyError if not found + + Returns + ------- + Dimension object for the requested dimension or default """ if dimension is not None and not isinstance(dimension, (int, str, Dimension)): raise TypeError('Dimension lookup supports int, string, ' @@ -971,11 +1037,14 @@ def get_dimension(self, dimension, default=None, strict=False) -> Dimension | No def get_dimension_index(self, dimension): """Get the index of the requested dimension. - Args: - dimension: Dimension to look up by name or by index + Parameters + ---------- + dimension + Dimension to look up by name or by index - Returns: - Integer index of the requested dimension + Returns + ------- + Integer index of the requested dimension """ if isinstance(dimension, int): if (dimension < (self.ndims + len(self.vdims)) or @@ -997,11 +1066,14 @@ def get_dimension_type(self, dim): Type is determined by Dimension.type attribute or common type of the dimension values, otherwise None. - Args: - dimension: Dimension to look up by name or by index + Parameters + ---------- + dimension + Dimension to look up by name or by index - Returns: - Declared type of values along the dimension + Returns + ------- + Declared type of values along the dimension """ dim_obj = self.get_dimension(dim) if dim_obj and dim_obj.type is not None: @@ -1014,8 +1086,7 @@ def get_dimension_type(self, dim): def __getitem__(self, key): - """ - Multi-dimensional indexing semantics is determined by the list + """Multi-dimensional indexing semantics is determined by the list of key dimensions. For instance, the first indexing component will index the first key dimension. @@ -1037,33 +1108,35 @@ def select(self, selection_specs=None, **kwargs): Selections may select a specific value, slice or set of values: - * value: Scalar values will select rows along with an exact - match, e.g.: + * value + Scalar values will select rows along with an exact match, e.g.: ds.select(x=3) - * slice: Slices may be declared as tuples of the upper and - lower bound, e.g.: + * slice + Slices may be declared as tuples of the upper and lower bound, e.g.: ds.select(x=(0, 3)) - * values: A list of values may be selected using a list or - set, e.g.: + * values + A list of values may be selected using a list or set, e.g.: ds.select(x=[0, 1, 2]) - Args: - selection_specs: List of specs to match on - A list of types, functions, or type[.group][.label] - strings specifying which objects to apply the - selection on. 
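A sketch of the dimension lookup methods documented above, using an invented two-column Dataset:

    import holoviews as hv

    ds = hv.Dataset({'x': [0, 1, 2], 'y': [0, 1, 4]}, kdims='x', vdims='y')
    ds.dimensions('key', label='name')   # ['x']
    ds.get_dimension('y')                # Dimension('y')
    ds.get_dimension(0)                  # positional lookup -> Dimension('x')
    ds.get_dimension('z')                # None, since 'z' is not declared
    ds.get_dimension_index('y')          # 1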
- **selection: Dictionary declaring selections by dimension - Selections can be scalar values, tuple ranges, lists - of discrete values and boolean arrays - - Returns: - Returns an Dimensioned object containing the selected data - or a scalar if a single value was selected + Parameters + ---------- + selection_specs : List of specs to match on + A list of types, functions, or type[.group][.label] + strings specifying which objects to apply the + selection on. + **selection: Dictionary declaring selections by dimension + Selections can be scalar values, tuple ranges, lists + of discrete values and boolean arrays + + Returns + ------- + Returns an Dimensioned object containing the selected data + or a scalar if a single value was selected """ if selection_specs is not None and not isinstance(selection_specs, (list, tuple)): selection_specs = [selection_specs] @@ -1127,18 +1200,26 @@ def select(self, selection_specs=None, **kwargs): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar + If false returns unique values + * Geometry + If false returns scalar values per geometry + * Gridded + If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ val = self._cached_constants.get(dimension, None) if val: @@ -1150,15 +1231,20 @@ def dimension_values(self, dimension, expanded=True, flat=True): def range(self, dimension, data_range=True, dimension_range=True): """Return the lower and upper bounds of values along dimension. - Args: - dimension: The dimension to compute the range on. - data_range (bool): Compute range from data values - dimension_range (bool): Include Dimension ranges - Whether to include Dimension range and soft_range - in range calculation - - Returns: - Tuple containing the lower and upper bound + Parameters + ---------- + dimension + The dimension to compute the range on. + data_range : bool + Compute range from data values + dimension_range : bool + Include Dimension ranges + Whether to include Dimension range and soft_range + in range calculation + + Returns + ------- + Tuple containing the lower and upper bound """ dimension = self.get_dimension(dimension) if dimension is None or (not data_range and not dimension_range): @@ -1209,21 +1295,25 @@ def options(self, *args, clone=True, **kwargs): Identical to the .opts method but returns a clone of the object by default. - Args: - *args: Sets of options to apply to object - Supports a number of formats including lists of Options - objects, a type[.group][.label] followed by a set of - keyword options to apply and a dictionary indexed by - type[.group][.label] specs. 
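A sketch tying together select, dimension_values and range on the same invented Dataset:

    import holoviews as hv

    ds = hv.Dataset({'x': [0, 1, 2, 3], 'y': [0, 1, 4, 9]}, kdims='x', vdims='y')
    ds.select(x=(1, 3))                       # tuple range selection on x
    ds.select(x=[0, 2])                       # discrete list of x values
    ds.dimension_values('x', expanded=False)  # unique x coordinates
    ds.range('y')                             # (0, 9)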
- backend (optional): Backend to apply options to - Defaults to current selected backend - clone (bool, optional): Whether to clone object - Options can be applied inplace with clone=False - **kwargs: Keywords of options - Set of options to apply to the object - - Returns: - Returns the cloned object with the options applied + Parameters + ---------- + *args: Sets of options to apply to object + Supports a number of formats including lists of Options + objects, a type[.group][.label] followed by a set of + keyword options to apply and a dictionary indexed by + type[.group][.label] specs. + backend : optional + Backend to apply options to + Defaults to current selected backend + clone : bool, optional + Whether to clone object + Options can be applied inplace with clone=False + **kwargs: Keywords of options + Set of options to apply to the object + + Returns + ------- + Returns the cloned object with the options applied """ backend = kwargs.get('backend', None) @@ -1268,24 +1358,24 @@ def options(self, *args, clone=True, **kwargs): return obj def _repr_mimebundle_(self, include=None, exclude=None): - """ - Resolves the class hierarchy for the class rendering the + """Resolves the class hierarchy for the class rendering the object using any display hooks registered on Store.display hooks. The output of all registered display_hooks is then combined and returned. + """ return Store.render(self) class ViewableElement(Dimensioned): - """ - A ViewableElement is a dimensioned datastructure that may be + """A ViewableElement is a dimensioned datastructure that may be associated with a corresponding atomic visualization. An atomic visualization will display the data on a single set of axes (i.e. excludes multiple subplots that are displayed at once). The only new parameter introduced by ViewableElement is the title associated with the object for display. + """ __abstract = True @@ -1296,11 +1386,11 @@ class ViewableElement(Dimensioned): class ViewableTree(AttrTree, Dimensioned): - """ - A ViewableTree is an AttrTree with Viewable objects as its leaf + """A ViewableTree is an AttrTree with Viewable objects as its leaf nodes. It combines the tree like data structure of a tree while extending it with the deep indexable properties of Dimensioned and LabelledData objects. + """ group = param.String(default='ViewableTree', constant=True) @@ -1317,7 +1407,9 @@ def __init__(self, items=None, identifier=None, parent=None, **kwargs): @classmethod def _process_items(cls, vals): - "Processes list of items assigning unique paths to each." + """Processes list of items assigning unique paths to each. + + """ from .layout import AdjointLayout if type(vals) is cls: @@ -1336,8 +1428,7 @@ def _process_items(cls, vals): def __setstate__(self, d): - """ - Ensure that object does not try to reference its parent during + """Ensure that object does not try to reference its parent during unpickling. 
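The options method above returns a styled clone by default. A brief sketch, assuming the bokeh plotting extension is installed and using color as one example of a backend option:

    import holoviews as hv
    hv.extension('bokeh')

    curve = hv.Curve([1, 2, 3])
    styled = curve.options(color='red')       # clone with the option applied
    curve.options(color='red', clone=False)   # apply in place instead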
""" parent = d.pop('parent', None) @@ -1348,7 +1439,9 @@ def __setstate__(self, d): @classmethod def _deduplicate_items(cls, items): - "Deduplicates assigned paths by incrementing numbering" + """Deduplicates assigned paths by incrementing numbering + + """ counter = Counter([path[:i] for path, _ in items for i in range(1, len(path)+1)]) if sum(counter.values()) == len(counter): return items @@ -1370,8 +1463,7 @@ def _deduplicate_items(cls, items): @classmethod def _unpack_paths(cls, objs, items, counts): - """ - Recursively unpacks lists and ViewableTree-like objects, accumulating + """Recursively unpacks lists and ViewableTree-like objects, accumulating into the supplied list of items. """ if type(objs) is cls: @@ -1389,7 +1481,9 @@ def _unpack_paths(cls, objs, items, counts): @property def uniform(self): - "Whether items in tree have uniform dimensions" + """Whether items in tree have uniform dimensions + + """ from .traversal import uniform return uniform(self) @@ -1399,18 +1493,26 @@ def dimension_values(self, dimension, expanded=True, flat=True): Concatenates values on all nodes with requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar + If false returns unique values + * Geometry + If false returns scalar values per geometry + * Gridded + If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ dimension = self.get_dimension(dimension, strict=True).name all_dims = self.traverse(lambda x: [d.name for d in x.dimensions()]) diff --git a/holoviews/core/element.py b/holoviews/core/element.py index 9368703c90..62160ec348 100644 --- a/holoviews/core/element.py +++ b/holoviews/core/element.py @@ -14,13 +14,13 @@ class Element(ViewableElement, Composable, Overlayable): - """ - Element is the atomic datastructure used to wrap some data with + """Element is the atomic datastructure used to wrap some data with an associated visual representation, e.g. an element may represent a set of points, an image or a curve. Elements provide a common API for interacting with data of different types and define how the data map to a set of dimensions and how those map to the visual representation. + """ group = param.String(default='Element', constant=True) @@ -39,15 +39,21 @@ def hist(self, dimension=None, num_bins=20, bin_range=None, Defaults to first value dimension if present otherwise falls back to first key dimension. 
- Args: - dimension: Dimension(s) to compute histogram on - num_bins (int, optional): Number of bins - bin_range (tuple optional): Lower and upper bounds of bins - adjoin (bool, optional): Whether to adjoin histogram - - Returns: - AdjointLayout of element and histogram or just the - histogram + Parameters + ---------- + dimension + Dimension(s) to compute histogram on + num_bins : int, optional + Number of bins + bin_range : tuple, optional + Lower and upper bounds of bins + adjoin : bool, optional + Whether to adjoin histogram + + Returns + ------- + AdjointLayout of element and histogram or just the + histogram """ from ..operation import histogram if not isinstance(dimension, list): dimension = [dimension] @@ -82,29 +88,40 @@ def __bool__(self): Subclasses may override this to signal that the Element contains no data and can safely be dropped during indexing. + """ return True def __contains__(self, dimension): - "Whether element contains the Dimension" + """Whether element contains the Dimension + + """ return dimension in self.dimensions() def __iter__(self): - "Disable iterator interface." + """Disable iterator interface. + + """ raise NotImplementedError('Iteration on Elements is not supported.') def closest(self, coords, **kwargs): """Snap list or dict of coordinates to closest position. - Args: - coords: List of 1D or 2D coordinates - **kwargs: Coordinates specified as keyword pairs - - Returns: - List of tuples of the snapped coordinates - - Raises: - NotImplementedError: Raised if snapping is not supported + Parameters + ---------- + coords + List of 1D or 2D coordinates + **kwargs + Coordinates specified as keyword pairs + + Returns + ------- + List of tuples of the snapped coordinates + + Raises + ------ + NotImplementedError + Raised if snapping is not supported """ raise NotImplementedError @@ -121,24 +138,30 @@ def sample(self, samples=None, bounds=None, closest=False, **sample_values): Sampling a range or grid of coordinates, e.g.: - 1D: ds.sample(3) - 2D: ds.sample((3, 3)) + 1D : ds.sample(3) + 2D : ds.sample((3, 3)) Sampling by keyword, e.g.: ds.sample(x=0) - Args: - samples: List of nd-coordinates to sample - bounds: Bounds of the region to sample - Defined as two-tuple for 1D sampling and four-tuple - for 2D sampling. - closest: Whether to snap to closest coordinates - **kwargs: Coordinates specified as keyword pairs - Keywords of dimensions and scalar coordinates - - Returns: - Element containing the sampled coordinates + Parameters + ---------- + samples + List of nd-coordinates to sample + bounds + Bounds of the region to sample + Defined as two-tuple for 1D sampling and four-tuple + for 2D sampling. + closest + Whether to snap to closest coordinates + **kwargs + Coordinates specified as keyword pairs + Keywords of dimensions and scalar coordinates + + Returns + ------- + Element containing the sampled coordinates """ if samples is None: samples = [] @@ -159,19 +182,25 @@ def reduce(self, dimensions=None, function=None, spreadfn=None, **reduction): ds.reduce(x=np.mean) - Args: - dimensions: Dimension(s) to apply reduction on - Defaults to all key dimensions - function: Reduction operation to apply, e.g. numpy.mean - spreadfn: Secondary reduction to compute value spread - Useful for computing a confidence interval, spread, or - standard deviation. - **reductions: Keyword argument defining reduction - Allows reduction to be defined as keyword pair of - dimension and function - - Returns: - The element after reductions have been applied. 
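The hist helper above adjoins a Histogram by default; a sketch using randomly generated Points:

    import numpy as np
    import holoviews as hv

    points = hv.Points(np.random.randn(1000, 2))
    points.hist(dimension='y', num_bins=20)   # Points with an adjoined y histogram
    points.hist(adjoin=False)                 # just the Histogram element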
+ Parameters + ---------- + dimensions + Dimension(s) to apply reduction on + Defaults to all key dimensions + function + Reduction operation to apply, e.g. numpy.mean + spreadfn + Secondary reduction to compute value spread + Useful for computing a confidence interval, spread, or + standard deviation. + **reductions + Keyword argument defining reduction + Allows reduction to be defined as keyword pair of + dimension and function + + Returns + ------- + The element after reductions have been applied. """ if dimensions is None: dimensions = [] @@ -203,12 +232,16 @@ def dframe(self, dimensions=None, multi_index=False): Returns a pandas dataframe of columns along each dimension, either completely flat or indexed by key dimensions. - Args: - dimensions: Dimensions to return as columns - multi_index: Convert key dimensions to (multi-)index + Parameters + ---------- + dimensions + Dimensions to return as columns + multi_index + Convert key dimensions to (multi-)index - Returns: - DataFrame of columns corresponding to each dimension + Returns + ------- + DataFrame of columns corresponding to each dimension """ if dimensions is None: dimensions = [d.name for d in self.dimensions()] @@ -225,11 +258,14 @@ def dframe(self, dimensions=None, multi_index=False): def array(self, dimensions=None): """Convert dimension values to columnar array. - Args: - dimensions: List of dimensions to return + Parameters + ---------- + dimensions + List of dimensions to return - Returns: - Array of columns corresponding to each dimension + Returns + ------- + Array of columns corresponding to each dimension """ if dimensions is None: dims = [d for d in self.kdims + self.vdims] @@ -247,33 +283,41 @@ def array(self, dimensions=None): class Tabular(Element): - """ - Baseclass to give an elements providing an API to generate a + """Baseclass to give an elements providing an API to generate a tabular representation of the object. + """ __abstract = True @property def rows(self): - "Number of rows in table (including header)" + """Number of rows in table (including header) + + """ return len(self) + 1 @property def cols(self): - "Number of columns in table" + """Number of columns in table + + """ return len(self.dimensions()) def pprint_cell(self, row, col): """Formatted contents of table cell. - Args: - row (int): Integer index of table row - col (int): Integer index of table column + Parameters + ---------- + row : int + Integer index of table row + col : int + Integer index of table column - Returns: - Formatted table cell contents + Returns + ------- + Formatted table cell contents """ ndims = self.ndims if col >= self.cols: @@ -295,12 +339,16 @@ def pprint_cell(self, row, col): def cell_type(self, row, col): """Type of the table cell, either 'data' or 'heading' - Args: - row (int): Integer index of table row - col (int): Integer index of table column + Parameters + ---------- + row : int + Integer index of table row + col : int + Integer index of table column - Returns: - Type of the table cell, either 'data' or 'heading' + Returns + ------- + Type of the table cell, either 'data' or 'heading' """ return 'heading' if row == 0 else 'data' @@ -327,13 +375,13 @@ class Element3D(Element2D): class Collator(NdMapping): - """ - Collator is an NdMapping type which can merge any number + """Collator is an NdMapping type which can merge any number of HoloViews components with whatever level of nesting by inserting the Collators key dimensions on the HoloMaps. 
If the items in the Collator do not contain HoloMaps they will be created. Collator also supports filtering of Tree structures and dropping of constant dimensions. + """ drop = param.List(default=[], doc=""" @@ -387,12 +435,12 @@ def __init__(self, data=None, **params): def __call__(self): - """ - Filter each Layout in the Collator with the supplied + """Filter each Layout in the Collator with the supplied path_filters. If merge is set to True all Layouts are merged, otherwise an NdMapping containing all the Layouts is returned. Optionally a list of dimensions to be ignored can be supplied. + """ constant_dims = self.static_dimensions ndmapping = NdMapping(kdims=self.kdims) @@ -427,8 +475,8 @@ def __call__(self): @property def static_dimensions(self): - """ - Return all constant dimensions. + """Return all constant dimensions. + """ dimensions = [] for dim in self.kdims: @@ -438,10 +486,10 @@ def static_dimensions(self): def _add_dimensions(self, item, dims, constant_keys): - """ - Recursively descend through an Layout and NdMapping objects + """Recursively descend through an Layout and NdMapping objects in order to add the supplied dimension values to all contained HoloMaps. + """ if isinstance(item, Layout): item.fixed = False diff --git a/holoviews/core/io.py b/holoviews/core/io.py index 686289a39c..346ce13d73 100644 --- a/holoviews/core/io.py +++ b/holoviews/core/io.py @@ -1,16 +1,16 @@ -""" -Module defining input/output interfaces to HoloViews. +"""Module defining input/output interfaces to HoloViews. There are two components for input/output: -Exporters: Process (composite) HoloViews objects one at a time. For +Exporters : Process (composite) HoloViews objects one at a time. For instance, an exporter may render a HoloViews object as a svg or perhaps pickle it. -Archives: A collection of HoloViews objects that are first collected +Archives : A collection of HoloViews objects that are first collected then processed together. For instance, collecting HoloViews objects for a report then generating a PDF or collecting HoloViews objects to dump to HDF5. + """ import itertools import os @@ -37,8 +37,8 @@ def sanitizer(name, replacements=None): - """ - String sanitizer to avoid problematic characters in filenames. + """String sanitizer to avoid problematic characters in filenames. + """ if replacements is None: replacements = [(':', '_'), ('/', '_'), ('\\', '_')] @@ -48,8 +48,7 @@ def sanitizer(name, replacements=None): class Reference(param.Parameterized): - """ - A Reference allows access to an object to be deferred until it is + """A Reference allows access to an object to be deferred until it is needed in the appropriate context. References are used by Collector to capture the state of an object at collection time. @@ -61,35 +60,36 @@ class Reference(param.Parameterized): A Reference only needs to have a resolved_type property and a resolve method. The constructor will take some specification of where to find the target object (may be the object itself). + """ @property def resolved_type(self): - """ - Returns the type of the object resolved by this references. If + """Returns the type of the object resolved by this references. If multiple types are possible, the return is a tuple of types. + """ raise NotImplementedError def resolve(self, container=None): - """ - Return the referenced object. Optionally, a container may be + """Return the referenced object. Optionally, a container may be passed in from which the object is to be resolved. 
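As a quick illustration of the `Element.hist`, `reduce` and `sample` signatures documented in the converted docstrings above, a minimal sketch (assuming only NumPy and HoloViews are importable; the data is made up) could look like:

    import numpy as np
    import holoviews as hv

    xs = np.linspace(0, 2 * np.pi, 50)
    curve = hv.Curve((xs, np.sin(xs)), 'x', 'y')

    adjoined = curve.hist(num_bins=20, adjoin=True)  # AdjointLayout of curve plus histogram
    hist_only = curve.hist(adjoin=False)             # just the histogram element

    reduced = curve.reduce(x=np.mean)                # reduce the 'x' dimension with a mean
    sampled = curve.sample(x=0)                      # sample by keyword coordinate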
+ """ raise NotImplementedError class Exporter(param.ParameterizedFunction): - """ - An Exporter is a parameterized function that accepts a HoloViews + """An Exporter is a parameterized function that accepts a HoloViews object and converts it to a new some new format. This mechanism is designed to be very general so here are a few examples: - Pickling: Native Python, supported by HoloViews. - Rendering: Any plotting backend may be used (default uses matplotlib) - Storage: Saving to a database (e.g. SQL), HDF5 etc. + Pickling : Native Python, supported by HoloViews. + Rendering : Any plotting backend may be used (default uses matplotlib) + Storage : Saving to a database (e.g. SQL), HDF5 etc. + """ # Mime-types that need encoding as utf-8 upon export @@ -116,10 +116,10 @@ class Exporter(param.ParameterizedFunction): @classmethod def encode(cls, entry): - """ - Classmethod that applies conditional encoding based on + """Classmethod that applies conditional encoding based on mime-type. Given an entry as returned by __call__ return the data in the appropriate encoding. + """ (data, info) = entry if info['mime_type'] in cls.utf8_mime_types: @@ -129,7 +129,9 @@ def encode(cls, entry): @bothmethod def _filename(self_or_cls, filename): - "Add the file extension if not already present" + """Add the file extension if not already present + + """ filename = os.fspath(filename) if not filename.endswith(self_or_cls.file_ext): return f'{filename}.{self_or_cls.file_ext}' @@ -138,16 +140,15 @@ def _filename(self_or_cls, filename): @bothmethod def _merge_metadata(self_or_cls, obj, fn, *dicts): - """ - Returns a merged metadata info dictionary from the supplied + """Returns a merged metadata info dictionary from the supplied function and additional dictionaries + """ merged = {k:v for d in dicts for (k,v) in d.items()} return dict(merged, **fn(obj)) if fn else merged def __call__(self, obj, fmt=None): - """ - Given a HoloViews object return the raw exported data and + """Given a HoloViews object return the raw exported data and corresponding metadata as the tuple (data, metadata). The metadata should include: @@ -157,14 +158,14 @@ def __call__(self, obj, fmt=None): The fmt argument may be used with exporters that support multiple output formats. If not supplied, the exporter is to pick an appropriate format automatically. + """ raise NotImplementedError("Exporter not implemented.") @bothmethod def save(self_or_cls, obj, basename, fmt=None, key=None, info=None, **kwargs): - """ - Similar to the call method except saves exporter data to disk + """Similar to the call method except saves exporter data to disk into a file with specified basename. For exporters that support multiple formats, the fmt argument may also be supplied (which typically corresponds to the file-extension). @@ -172,6 +173,7 @@ def save(self_or_cls, obj, basename, fmt=None, key=None, info=None, **kwargs): The supplied metadata key and info dictionaries will be used to update the output of the relevant key and info functions which is then saved (if supported). + """ if info is None: info = {} @@ -182,31 +184,31 @@ def save(self_or_cls, obj, basename, fmt=None, key=None, info=None, **kwargs): class Importer(param.ParameterizedFunction): - """ - An Importer is a parameterized function that accepts some data in + """An Importer is a parameterized function that accepts some data in some format and returns a HoloViews object. 
This mechanism is designed to be very general so here are a few examples: - Unpickling: Native Python, supported by HoloViews. - Servers: Loading data over a network connection. - Storage: Loading from a database (e.g. SQL), HDF5 etc. + Unpickling : Native Python, supported by HoloViews. + Servers : Loading data over a network connection. + Storage : Loading from a database (e.g. SQL), HDF5 etc. + """ def __call__(self, data): - """ - Given raw data in the appropriate format return the + """Given raw data in the appropriate format return the corresponding HoloViews object. Acts as the inverse of Exporter when supplied the data portion of an Exporter's output. + """ raise NotImplementedError("Importer not implemented.") @bothmethod def load(self_or_cls, src, entries=None): - """ - Given some source (e.g. a filename, a network connection etc), + """Given some source (e.g. a filename, a network connection etc), return the loaded HoloViews object. + """ raise NotImplementedError("Importer load method not implemented.") @@ -218,22 +220,24 @@ def loader(self_or_cls, kwargs): @bothmethod def info(self_or_cls, src): - """ - Returns the 'info' portion of the metadata (if available). + """Returns the 'info' portion of the metadata (if available). + """ raise NotImplementedError("Importer info method not implemented.") @bothmethod def key(self_or_cls, src): - """ - Returns the metadata key (if available). + """Returns the metadata key (if available). + """ raise NotImplementedError("Importer keys method not implemented.") class Serializer(Exporter): - "A generic exporter that supports any arbitrary serializer" + """A generic exporter that supports any arbitrary serializer + + """ serializer=param.Callable(default=Store.dumps, doc=""" The serializer function, set to Store.dumps by default. The @@ -273,7 +277,9 @@ def save(self_or_cls, obj, filename, info=None, key=None, **kwargs): class Deserializer(Importer): - "A generic importer that supports any arbitrary de-serializer." + """A generic importer that supports any arbitrary de-serializer. + + """ deserializer=param.Callable(default=Store.load, doc=""" The deserializer function, set to Store.load by default. The @@ -316,8 +322,7 @@ def info(self_or_cls, filename): class Pickler(Exporter): - """ - The recommended pickler for serializing HoloViews object to a .hvz + """The recommended pickler for serializing HoloViews object to a .hvz file (a simple zip archive of pickle files). In addition to the functionality offered by Store.dump and Store.load, this file format offers three additional features: @@ -328,6 +333,7 @@ class Pickler(Exporter): The output file with the .hvz file extension is simply a zip archive containing pickled HoloViews objects. + """ protocol = param.Integer(default=2, doc=""" @@ -382,8 +388,7 @@ def save(self_or_cls, obj, filename, key=None, info=None, **kwargs): class Unpickler(Importer): - """ - The inverse of Pickler used to load the .hvz file format which is + """The inverse of Pickler used to load the .hvz file format which is simply a zip archive of pickle objects. Unlike a regular pickle file, info and key metadata as well as @@ -392,6 +397,7 @@ class Unpickler(Importer): The components that may be individually loaded may be found using the entries method. 
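The Pickler/Unpickler pair described above round-trips objects through the .hvz format; a hedged sketch (assuming write access to the working directory; the filename is illustrative):

    import holoviews as hv
    from holoviews.core.io import Pickler, Unpickler

    curve = hv.Curve([1, 2, 3], label='demo')

    # Pickler writes a zip archive of pickles; the .hvz extension is appended if missing
    Pickler.save(curve, 'demo_curve', info={'note': 'toy example'})

    # Unpickler lists and reloads the archived components
    print(Unpickler.entries('demo_curve.hvz'))
    restored = Unpickler.load('demo_curve.hvz')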
+ """ def __call__(self, data, entries=None): @@ -439,8 +445,7 @@ def entries(self_or_cls, filename): @bothmethod def collect(self_or_cls, files, drop=None, metadata=True): - """ - Given a list or NdMapping type containing file paths return a + """Given a list or NdMapping type containing file paths return a Layout of Collators, which can be called to load a given set of files using the current Importer. @@ -449,6 +454,7 @@ def collect(self_or_cls, files, drop=None, metadata=True): supplied additional key dimensions may be supplied as long as they do not clash with the file metadata. Any key dimension may be dropped by name by supplying a drop argument. + """ if drop is None: drop = [] @@ -494,14 +500,14 @@ def collect(self_or_cls, files, drop=None, metadata=True): class Archive(param.Parameterized): - """ - An Archive is a means to collect and store a collection of + """An Archive is a means to collect and store a collection of HoloViews objects in any number of different ways. Examples of possible archives: * Generating tar or zip files (compressed or uncompressed). * Collating a report or document (e.g. PDF, HTML, LaTex). * Storing a collection of HoloViews objects to a database or HDF5. + """ exporters= param.List(default=[], doc=""" @@ -509,30 +515,29 @@ class Archive(param.Parameterized): appropriate format(s).""" ) def add(self, obj, *args, **kwargs): - """ - Add a HoloViews object to the archive. + """Add a HoloViews object to the archive. + """ raise NotImplementedError def export(self,*args, **kwargs): - """ - Finalize and close the archive. + """Finalize and close the archive. + """ raise NotImplementedError def simple_name_generator(obj): - """ - Simple name_generator designed for HoloViews objects. + """Simple name_generator designed for HoloViews objects. Objects are labeled with {group}-{label} for each nested object, based on a depth-first search. Adjacent objects with identical representations yield only a single copy of the representation, to avoid long names for the common case of a container whose element(s) share the same group and label. - """ + """ if isinstance(obj, LabelledData): labels = obj.traverse(lambda x: (x.group + ('-' +x.label if x.label else ''))) @@ -545,9 +550,9 @@ def simple_name_generator(obj): class FileArchive(Archive): - """ - A file archive stores files on disk, either unpacked in a + """A file archive stores files on disk, either unpacked in a directory or in an archive format (e.g. a zip file). + """ exporters= param.List(default=[Pickler], doc=""" @@ -629,7 +634,9 @@ class FileArchive(Archive): @classmethod def parse_fields(cls, formatter): - "Returns the format fields otherwise raise exception" + """Returns the format fields otherwise raise exception + + """ if formatter is None: return [] try: parse = list(string.Formatter().parse(formatter)) @@ -678,14 +685,14 @@ def _validate_formatters(self): def add(self, obj=None, filename=None, data=None, info=None, **kwargs): - """ - If a filename is supplied, it will be used. Otherwise, a + """If a filename is supplied, it will be used. Otherwise, a filename will be generated from the supplied object. Note that if the explicit filename uses the {timestamp} field, it will be formatted upon export. The data to be archived is either supplied explicitly as 'data' or automatically rendered from the object. 
+ """ if info is None: info = {} @@ -790,14 +797,14 @@ def _directory_archive(self, export_name, files, root): def _unique_name(self, basename, ext, existing, force=False): - """ - Find a unique basename for a new file/key where existing is + """Find a unique basename for a new file/key where existing is either a list of (basename, ext) pairs or an absolute path to a directory. By default, uniqueness is enforced depending on the state of the unique_name parameter (for export names). If force is True, this parameter is ignored and uniqueness is guaranteed. + """ skip = False if force else (not self.unique_name) if skip: return (basename, ext) @@ -832,8 +839,8 @@ def _normalize_name(self, basename): def export(self, timestamp=None, info=None): - """ - Export the archive, directory or file. + """Export the archive, directory or file. + """ if info is None: info = {} @@ -863,14 +870,18 @@ def _format(self, formatter, info): return formatter.format(**filtered) def __len__(self): - "The number of files currently specified in the archive" + """The number of files currently specified in the archive. + + """ return len(self._files) def __repr__(self): return self.param.pprint() def contents(self, maxlen=70): - "Print the current (unexported) contents of the archive" + """Print the current (unexported) contents of the archive. + + """ lines = [] if len(self._files) == 0: print(f"Empty {self.__class__.__name__}") @@ -884,9 +895,13 @@ def contents(self, maxlen=70): print('\n'.join(lines)) def listing(self): - "Return a list of filename entries currently in the archive" + """Return a list of filename entries currently in the archive. + + """ return [f'{f}.{ext}' if ext else f for (f,ext) in self._files.keys()] def clear(self): - "Clears the file archive" + """Clears the file archive + + """ self._files.clear() diff --git a/holoviews/core/layout.py b/holoviews/core/layout.py index c3d68e8f6e..f856f05b01 100644 --- a/holoviews/core/layout.py +++ b/holoviews/core/layout.py @@ -1,8 +1,8 @@ -""" -Supplies Pane, Layout, NdLayout and AdjointLayout. Pane extends View +"""Supplies Pane, Layout, NdLayout and AdjointLayout. Pane extends View to allow multiple Views to be presented side-by-side in a NdLayout. An AdjointLayout allows one or two Views to be adjoined to a primary View to act as supplementary elements. + """ import numpy as np @@ -14,12 +14,13 @@ class Layoutable: - """ - Layoutable provides a mix-in class to support the + """Layoutable provides a mix-in class to support the add operation for creating a layout from the operands. + """ + def __add__(x, y): - "Compose objects into a Layout" + """Compose objects into a Layout""" if any(isinstance(arg, int) for arg in (x, y)): raise TypeError(f"unsupported operand type(s) for +: {x.__class__.__name__} and {y.__class__.__name__}. " "If you are trying to use a reduction like `sum(elements)` " @@ -36,13 +37,15 @@ def __radd__(self, other): class Composable(Layoutable): - """ - Composable is a mix-in class to allow Dimensioned objects to be + """Composable is a mix-in class to allow Dimensioned objects to be embedded within Layouts and GridSpaces. + """ def __lshift__(self, other): - "Compose objects into an AdjointLayout" + """Compose objects into an AdjointLayout + + """ if isinstance(other, (ViewableElement, NdMapping, Empty)): return AdjointLayout([self, other]) elif isinstance(other, AdjointLayout): @@ -53,11 +56,11 @@ def __lshift__(self, other): class Empty(Dimensioned, Composable): - """ - Empty may be used to define an empty placeholder in a Layout. 
It + """Empty may be used to define an empty placeholder in a Layout. It can be placed in a Layout just like any regular Element and container type via the + operator or by passing it to the Layout constructor as a part of a list. + """ group = param.String(default='Empty') @@ -68,8 +71,7 @@ def __init__(self, **params): class AdjointLayout(Layoutable, Dimensioned): - """ - An AdjointLayout provides a convenient container to lay out some + """An AdjointLayout provides a convenient container to lay out some marginal plots next to a primary plot. This is often useful to display the marginal distributions of a plot next to the primary plot. AdjointLayout accepts a list of up to three elements, which @@ -83,6 +85,7 @@ class AdjointLayout(Layoutable, Dimensioned): | 1 | 2 | 3: top | | | |___________|___| + """ kdims = param.List(default=[Dimension('AdjointLayout')], constant=True) @@ -163,7 +166,9 @@ def __rmul__(self, other): @property def group(self): - "Group inherited from main element" + """Group inherited from main element + + """ if self.main and self.main.group != type(self.main).__name__: return self.main.group else: @@ -171,7 +176,9 @@ def group(self): @property def label(self): - "Label inherited from main element" + """Label inherited from main element + + """ return self.main.label if self.main else '' @@ -187,30 +194,40 @@ def relabel(self, label=None, group=None, depth=1): Applies relabeling to child up to the supplied depth. - Args: - label (str, optional): New label to apply to returned object - group (str, optional): New group to apply to returned object - depth (int, optional): Depth to which relabel will be applied - If applied to container allows applying relabeling to - contained objects up to the specified depth - - Returns: - Returns relabelled object + Parameters + ---------- + label : str, optional + New label to apply to returned object + group : str, optional + New group to apply to returned object + depth : int, optional + Depth to which relabel will be applied + If applied to container allows applying relabeling to + contained objects up to the specified depth + + Returns + ------- + Returns relabelled object """ return super().relabel(label=label, group=group, depth=depth) def get(self, key, default=None): - """ - Returns the viewable corresponding to the supplied string + """Returns the viewable corresponding to the supplied string or integer based key. - Args: - key: Numeric or string index: 0) 'main' 1) 'right' 2) 'top' - default: Value returned if key not found - - Returns: - Indexed value or supplied default + Parameters + ---------- + key : Numeric or string index + 0: 'main' + 1: 'right' + 2: 'top' + default + Value returned if key not found + + Returns + ------- + Indexed value or supplied default """ return self.data[key] if key in self.data else default @@ -220,25 +237,32 @@ def dimension_values(self, dimension, expanded=True, flat=True): Applies to the main object in the AdjointLayout. 
- Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar: If false returns unique values + * Geometry: If false returns scalar values per geometry + * Gridded: If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ dimension = self.get_dimension(dimension, strict=True).name return self.main.dimension_values(dimension, expanded, flat) def __getitem__(self, key): - "Index into the AdjointLayout by index or label" + """Index into the AdjointLayout by index or label + + """ if key == (): return self @@ -276,7 +300,9 @@ def __setitem__(self, key, value): def __lshift__(self, other): - "Add another plot to the AdjointLayout" + """Add another plot to the AdjointLayout + + """ views = [self.data.get(k, None) for k in self.layout_order] return AdjointLayout([v for v in views if v is not None] + [other]) @@ -287,17 +313,23 @@ def ddims(self): @property def main(self): - "Returns the main element in the AdjointLayout" + """Returns the main element in the AdjointLayout + + """ return self.data.get('main', None) @property def right(self): - "Returns the right marginal element in the AdjointLayout" + """Returns the right marginal element in the AdjointLayout + + """ return self.data.get('right', None) @property def top(self): - "Returns the top marginal element in the AdjointLayout" + """Returns the top marginal element in the AdjointLayout + + """ return self.data.get('top', None) @property @@ -322,17 +354,19 @@ def __iter__(self): i += 1 def __len__(self): - "Number of items in the AdjointLayout" + """Number of items in the AdjointLayout + + """ return len(self.data) class NdLayout(Layoutable, UniformNdMapping): - """ - NdLayout is a UniformNdMapping providing an n-dimensional + """NdLayout is a UniformNdMapping providing an n-dimensional data structure to display the contained Elements and containers in a layout. Using the cols method the NdLayout can be rearranged with the desired number of columns. + """ data_type = (ViewableElement, AdjointLayout, UniformNdMapping) @@ -351,7 +385,9 @@ def uniform(self): @property def shape(self): - "Tuple indicating the number of rows and columns in the NdLayout." + """Tuple indicating the number of rows and columns in the NdLayout. + + """ num = len(self.keys()) if num <= self._max_cols: return (1, num) @@ -361,9 +397,9 @@ def shape(self): def grid_items(self): - """ - Compute a dict of {(row,column): (key, value)} elements from the + """Compute a dict of {(row,column): (key, value)} elements from the current set of items and specified number of columns. + """ if list(self.keys()) == []: return {} cols = self._max_cols @@ -378,17 +414,19 @@ def cols(self, ncols): row. The number of columns control the indexing and display semantics of the NdLayout. 
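NdLayout's `cols`, `shape` and `grid_items`, described above, control how the contained elements are laid out on a grid; a minimal sketch:

    import numpy as np
    import holoviews as hv

    curves = {i: hv.Curve(np.random.randn(10).cumsum()) for i in range(6)}
    ndlayout = hv.NdLayout(curves, kdims=['run']).cols(3)  # at most three columns per row

    print(ndlayout.shape)          # (rows, columns) implied by the column limit
    grid = ndlayout.grid_items()   # {(row, col): (key, element)} mapping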
- Args: - ncols (int): Number of columns to set on the NdLayout + Parameters + ---------- + ncols : int + Number of columns to set on the NdLayout """ self._max_cols = ncols return self @property def last(self): - """ - Returns another NdLayout constituted of the last views of the + """Returns another NdLayout constituted of the last views of the individual elements (if they are maps). + """ last_items = [] for (k, v) in self.items(): @@ -405,15 +443,22 @@ def last(self): def clone(self, *args, **overrides): """Clones the NdLayout, overriding data and parameters. - Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned NdLayout object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned NdLayout object """ clone = super().clone(*args, **overrides) clone._max_cols = self._max_cols @@ -422,15 +467,17 @@ def clone(self, *args, **overrides): class Layout(Layoutable, ViewableTree): - """ - A Layout is an ViewableTree with ViewableElement objects as leaf - values. Unlike ViewableTree, a Layout supports a rich display, + """A Layout is an ViewableTree with ViewableElement objects as leaf + values. + + Unlike ViewableTree, a Layout supports a rich display, displaying leaf items in a grid style layout. In addition to the usual ViewableTree indexing, Layout supports indexing of items by their row and column index in the layout. The maximum number of columns in such a layout may be controlled with the cols method. + """ group = param.String(default='Layout', constant=True) @@ -453,15 +500,18 @@ def decollate(self): positional_stream_args=True, and the callback function accepts stream values as positional dict arguments. - Returns: - DynamicMap that returns a Layout + Returns + ------- + DynamicMap that returns a Layout """ from .decollate import decollate return decollate(self) @property def shape(self): - "Tuple indicating the number of rows and columns in the Layout." + """Tuple indicating the number of rows and columns in the Layout. + + """ num = len(self) if num <= self._max_cols: return (1, num) @@ -471,7 +521,9 @@ def shape(self): def __getitem__(self, key): - "Allows indexing Layout by row and column or path" + """Allows indexing Layout by row and column or path + + """ if isinstance(key, int): if key < len(self): return list(self.data.values())[key] @@ -493,15 +545,22 @@ def __getitem__(self, key): def clone(self, *args, **overrides): """Clones the Layout, overriding data and parameters. 
- Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned Layout object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned Layout object """ clone = super().clone(*args, **overrides) clone._max_cols = self._max_cols @@ -515,8 +574,10 @@ def cols(self, ncols): row. The number of columns control the indexing and display semantics of the NdLayout. - Args: - ncols (int): Number of columns to set on the NdLayout + Parameters + ---------- + ncols : int + Number of columns to set on the NdLayout """ self._max_cols = ncols return self @@ -526,15 +587,20 @@ def relabel(self, label=None, group=None, depth=1): Applies relabeling to children up to the supplied depth. - Args: - label (str, optional): New label to apply to returned object - group (str, optional): New group to apply to returned object - depth (int, optional): Depth to which relabel will be applied - If applied to container allows applying relabeling to - contained objects up to the specified depth - - Returns: - Returns relabelled object + Parameters + ---------- + label : str, optional + New label to apply to returned object + group : str, optional + New group to apply to returned object + depth : int, optional + Depth to which relabel will be applied + If applied to container allows applying relabeling to + contained objects up to the specified depth + + Returns + ------- + Returns relabelled object """ return super().relabel(label, group, depth) diff --git a/holoviews/core/ndmapping.py b/holoviews/core/ndmapping.py index 78fb1b3aaa..f59748ad74 100644 --- a/holoviews/core/ndmapping.py +++ b/holoviews/core/ndmapping.py @@ -1,7 +1,7 @@ -""" -Supplies MultiDimensionalMapping and NdMapping which are multi-dimensional +"""Supplies MultiDimensionalMapping and NdMapping which are multi-dimensional map types. The former class only allows indexing whereas the latter also enables slicing over multiple dimension ranges. + """ from itertools import cycle @@ -24,12 +24,12 @@ class item_check: - """ - Context manager to allow creating NdMapping types without + """Context manager to allow creating NdMapping types without performing the usual item_checks, providing significant speedups when there are a lot of items. Should only be used when both keys and values are guaranteed to be the right type, as is the case for many internal operations. + """ def __init__(self, enabled): @@ -44,11 +44,11 @@ def __exit__(self, exc_type, exc_val, exc_tb): class sorted_context: - """ - Context manager to temporarily disable sorting on NdMapping + """Context manager to temporarily disable sorting on NdMapping types. Retains the current sort order, which can be useful as an optimization on NdMapping instances where sort=True but the items are already known to have been sorted. 
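Layout indexing, `cols` and `relabel`, whose docstrings are converted above, can be exercised as follows (the group and label names are illustrative):

    import holoviews as hv

    layout = (hv.Curve([1, 2, 3], group='Signal', label='A')
              + hv.Curve([3, 2, 1], group='Signal', label='B')
              + hv.Curve([2, 2, 2], group='Signal', label='C')).cols(2)

    first = layout[0]              # integer indexing by position in the layout
    by_path = layout.Signal.A      # attribute access by group/label path
    renamed = layout.relabel(group='Trace', depth=2)  # relabel children up to the given depth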
+ """ def __init__(self, enabled): @@ -64,8 +64,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): class MultiDimensionalMapping(Dimensioned): - """ - An MultiDimensionalMapping is a Dimensioned mapping (like a + """An MultiDimensionalMapping is a Dimensioned mapping (like a dictionary or array) that uses fixed-length multidimensional keys. This behaves like a sparse N-dimensional array that does not require a dense sampling over the multidimensional space. @@ -87,6 +86,7 @@ class MultiDimensionalMapping(Dimensioned): useful slicing methods for selecting subsets of the data. Even so, keeping the slicing support separate from the indexing and data storage methods helps make both classes easier to understand. + """ group = param.String(default='MultiDimensionalMapping', constant=True) @@ -129,10 +129,10 @@ def __init__(self, initial_items=None, kdims=None, **params): def _item_check(self, dim_vals, data): - """ - Applies optional checks to individual data elements before + """Applies optional checks to individual data elements before they are inserted ensuring that they are of a certain type. Subclassed may implement further element restrictions. + """ if not self._check_items: return @@ -153,9 +153,9 @@ def _item_check(self, dim_vals, data): def _add_item(self, dim_vals, data, sort=True, update=True): - """ - Adds item to the data, applying dimension types and ensuring + """Adds item to the data, applying dimension types and ensuring key conforms to Dimension type and values. + """ sort = sort and self.sort if not isinstance(dim_vals, tuple): @@ -185,9 +185,9 @@ def _add_item(self, dim_vals, data, sort=True, update=True): def _apply_key_type(self, keys): - """ - If a type is specified by the corresponding key dimension, + """If a type is specified by the corresponding key dimension, this method applies the type to the supplied key. + """ typed_key = () for dim, key in zip(self.kdims, keys): @@ -208,10 +208,10 @@ def _apply_key_type(self, keys): def _split_index(self, key): - """ - Partitions key into key and deep dimension groups. If only key + """Partitions key into key and deep dimension groups. If only key indices are supplied, the data is indexed with an empty tuple. Keys with indices than there are dimensions will be padded. + """ if not isinstance(key, tuple): key = (key,) @@ -235,10 +235,10 @@ def _split_index(self, key): def _dataslice(self, data, indices): - """ - Returns slice of data element if the item is deep + """Returns slice of data element if the item is deep indexable. Warns if attempting to slice an object that has not been declared deep indexable. + """ if self._deep_indexable and isinstance(data, Dimensioned) and indices: return data[indices] @@ -256,18 +256,26 @@ def _resort(self): def clone(self, data=None, shared_data=True, *args, **overrides): """Clones the object, overriding data and parameters. - Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - link (bool, optional): Whether clone should be linked - Determines whether Streams and Links attached to - original object will be inherited. 
- *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + link : bool, optional + Whether clone should be linked + Determines whether Streams and Links attached to + original object will be inherited. + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned object """ with item_check(not shared_data and self._check_items): return super().clone(data, shared_data, *args, **overrides) @@ -280,16 +288,23 @@ def groupby(self, dimensions, container_type=None, group_type=None, **kwargs): returning an object of type container_type (expected to be dictionary-like) containing the groups. - Args: - dimensions: Dimension(s) to group by - container_type: Type to cast group container to - group_type: Type to cast each group to - dynamic: Whether to return a DynamicMap - **kwargs: Keyword arguments to pass to each group - - Returns: - Returns object of supplied container_type containing the - groups. If dynamic=True returns a DynamicMap instead. + Parameters + ---------- + dimensions + Dimension(s) to group by + container_type + Type to cast group container to + group_type + Type to cast each group to + dynamic + Whether to return a DynamicMap + **kwargs + Keyword arguments to pass to each group + + Returns + ------- + Returns object of supplied container_type containing the + groups. If dynamic=True returns a DynamicMap instead. """ if self.ndims == 1: self.param.warning('Cannot split Map with only one dimension.') @@ -312,15 +327,22 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): the key dimensions and a key value scalar or sequence of the same length as the existing keys. - Args: - dimension: Dimension or dimension spec to add - dim_pos (int) Integer index to insert dimension at - dim_val (scalar or ndarray): Dimension value(s) to add - vdim: Disabled, this type does not have value dimensions - **kwargs: Keyword arguments passed to the cloned element - - Returns: - Cloned object containing the new dimension + Parameters + ---------- + dimension + Dimension or dimension spec to add + dim_pos : int + Integer index to insert dimension at + dim_val : scalar or ndarray + Dimension value(s) to add + vdim + Disabled, this type does not have value dimensions + **kwargs + Keyword arguments passed to the cloned element + + Returns + ------- + Cloned object containing the new dimension """ dimension = asdim(dimension) @@ -363,11 +385,14 @@ def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs): def drop_dimension(self, dimensions): """Drops dimension(s) from keys - Args: - dimensions: Dimension(s) to drop + Parameters + ---------- + dimensions + Dimension(s) to drop - Returns: - Clone of object with with dropped dimension(s) + Returns + ------- + Clone of object with with dropped dimension(s) """ dimensions = [dimensions] if np.isscalar(dimensions) else dimensions dims = [d for d in self.kdims if d not in dimensions] @@ -380,18 +405,23 @@ def drop_dimension(self, dimensions): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. 
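The MultiDimensionalMapping methods documented above (`groupby`, `add_dimension`, `drop_dimension`, `dimension_values`) are most easily tried on a HoloMap; a sketch with made-up keys:

    import numpy as np
    import holoviews as hv

    hmap = hv.HoloMap({(a, b): hv.Curve(np.random.randn(10).cumsum())
                       for a in range(2) for b in range(3)}, kdims=['alpha', 'beta'])

    grouped = hmap.groupby('alpha')               # one group per 'alpha' value
    with_run = hmap.add_dimension('run', 0, 1)    # insert a new key dimension at position 0
    without_run = with_run.drop_dimension('run')  # and drop it again
    alphas = hmap.dimension_values('alpha')       # values along a key dimension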
- Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar: If false returns unique values + * Geometry: If false returns scalar values per geometry + * Gridded: If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ dimension = self.get_dimension(dimension, strict=True) if dimension in self.kdims: @@ -416,12 +446,16 @@ def reindex(self, kdims=None, force=False): created object as the new labels must be sufficient to address each value uniquely. - Args: - kdims (optional): New list of key dimensions after reindexing - force (bool, optional): Whether to drop non-unique items + Parameters + ---------- + kdims : optional + New list of key dimensions after reindexing + force : bool, optional + Whether to drop non-unique items - Returns: - Reindexed object + Returns + ------- + Reindexed object """ if kdims is None: kdims = [] @@ -455,22 +489,26 @@ def reindex(self, kdims=None, force=False): @property def last(self): - "Returns the item highest data item along the map dimensions." + """Returns the item highest data item along the map dimensions. + + """ return list(self.data.values())[-1] if len(self) else None @property def last_key(self): - "Returns the last key value." + """Returns the last key value. + + """ return list(self.keys())[-1] if len(self) else None @property def info(self): - """ - Prints information about the Dimensioned object, including the + """Prints information about the Dimensioned object, including the number and type of objects contained within it and information about its dimensions. + """ if (len(self.values()) > 0): info_str = self.__class__.__name__ +\ @@ -495,9 +533,11 @@ def info(self): def update(self, other): """Merges other item with this object - Args: - other: Object containing items to merge into this object - Must be a dictionary or NdMapping type + Parameters + ---------- + other + Object containing items to merge into this object + Must be a dictionary or NdMapping type """ if isinstance(other, NdMapping): dims = [d for d in other.kdims if d not in self.kdims] @@ -514,7 +554,9 @@ def update(self, other): def keys(self): - " Returns the keys of all the elements." + """Returns the keys of all the elements. + + """ if self.ndims == 1: return [k[0] for k in self.data.keys()] else: @@ -522,17 +564,23 @@ def keys(self): def values(self): - "Returns the values of all the elements." + """Returns the values of all the elements. + + """ return list(self.data.values()) def items(self): - "Returns all elements as a list in (key,value) format." + """Returns all elements as a list in (key,value) format. 
+ + """ return list(zip(list(self.keys()), list(self.values()))) def get(self, key, default=None): - "Standard get semantics for all mapping types" + """Standard get semantics for all mapping types + + """ try: if key is None: return None @@ -542,16 +590,18 @@ def get(self, key, default=None): def pop(self, key, default=None): - "Standard pop semantics for all mapping types" + """Standard pop semantics for all mapping types + + """ if not isinstance(key, tuple): key = (key,) return self.data.pop(key, default) def __getitem__(self, key): - """ - Allows multi-dimensional indexing in the order of the + """Allows multi-dimensional indexing in the order of the specified key dimensions, passing any additional indices to the data elements. + """ if key in [Ellipsis, ()]: return self @@ -560,7 +610,9 @@ def __getitem__(self, key): def __setitem__(self, key, value): - "Adds item to mapping" + """Adds item to mapping + + """ self._add_item(key, value, update=False) @@ -569,7 +621,9 @@ def __str__(self): def __iter__(self): - "Iterates over mapping values" + """Iterates over mapping values + + """ return iter(self.values()) @@ -584,24 +638,24 @@ def __len__(self): class NdMapping(MultiDimensionalMapping): - """ - NdMapping supports the same indexing semantics as + """NdMapping supports the same indexing semantics as MultiDimensionalMapping but also supports slicing semantics. Slicing semantics on an NdMapping is dependent on the ordering semantics of the keys. As MultiDimensionalMapping sort the keys, a slice on an NdMapping is effectively a way of filtering out the keys that are outside the slice range. + """ group = param.String(default='NdMapping', constant=True) def __getitem__(self, indexslice): - """ - Allows slicing operations along the key and data + """Allows slicing operations along the key and data dimensions. If no data slice is supplied it will return all data elements, otherwise it will return the requested slice of the data. + """ if isinstance(indexslice, np.ndarray) and indexslice.dtype.kind == 'b': if not len(indexslice) == len(self): @@ -642,8 +696,8 @@ def __getitem__(self, indexslice): def _expand_slice(self, indices): - """ - Expands slices containing steps into a list. + """Expands slices containing steps into a list. + """ keys = list(self.data.keys()) expanded = [] @@ -666,16 +720,16 @@ def _expand_slice(self, indices): def _transform_indices(self, indices): - """ - Identity function here but subclasses can implement transforms + """Identity function here but subclasses can implement transforms of the dimension indices from one coordinate system to another. + """ return indices def _generate_conditions(self, map_slice): - """ - Generates filter conditions used for slicing the data structure. + """Generates filter conditions used for slicing the data structure. + """ conditions = [] for dim, dim_slice in zip(self.kdims, map_slice): @@ -748,8 +802,7 @@ def _all_condition(self): class UniformNdMapping(NdMapping): - """ - A UniformNdMapping is a map of Dimensioned objects and is itself + """A UniformNdMapping is a map of Dimensioned objects and is itself indexed over a number of specified dimensions. The dimension may be a spatial dimension (i.e., a ZStack), time (specifying a frame sequence) or any other combination of Dimensions. @@ -758,6 +811,7 @@ class UniformNdMapping(NdMapping): and split along its and its containing Element's dimensions. Subclasses should implement the appropriate slicing, sampling and reduction methods for their Dimensioned type. 
+ """ data_type = (ViewableElement, NdMapping) @@ -776,18 +830,26 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): """Clones the object, overriding data and parameters. - Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - link (bool, optional): Whether clone should be linked - Determines whether Streams and Links attached to - original object will be inherited. - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + link : bool, optional + Whether clone should be linked + Determines whether Streams and Links attached to + original object will be inherited. + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned object """ settings = self.param.values() if settings.get('group', None) != self._group: @@ -822,18 +884,24 @@ def collapse(self, dimensions=None, function=None, spreadfn=None, **kwargs): Useful to collapse stacks of objects into a single object, e.g. to average a stack of Images or Curves. - Args: - dimensions: Dimension(s) to collapse - Defaults to all key dimensions - function: Aggregation function to apply, e.g. numpy.mean - spreadfn: Secondary reduction to compute value spread - Useful for computing a confidence interval, spread, or - standard deviation. - **kwargs: Keyword arguments passed to the aggregation function - - Returns: - Returns the collapsed element or HoloMap of collapsed - elements + Parameters + ---------- + dimensions + Dimension(s) to collapse + Defaults to all key dimensions + function + Aggregation function to apply, e.g. numpy.mean + spreadfn + Secondary reduction to compute value spread + Useful for computing a confidence interval, spread, or + standard deviation. + **kwargs + Keyword arguments passed to the aggregation function + + Returns + ------- + Returns the collapsed element or HoloMap of collapsed + elements """ from .data import concat from .overlay import CompositeOverlay @@ -884,12 +952,16 @@ def dframe(self, dimensions=None, multi_index=False): Returns a pandas dataframe of columns along each dimension, either completely flat or indexed by key dimensions. 
- Args: - dimensions: Dimensions to return as columns - multi_index: Convert key dimensions to (multi-)index + Parameters + ---------- + dimensions + Dimensions to return as columns + multi_index + Convert key dimensions to (multi-)index - Returns: - DataFrame of columns corresponding to each dimension + Returns + ------- + DataFrame of columns corresponding to each dimension """ if dimensions is None: outer_dimensions = self.kdims @@ -928,7 +1000,9 @@ def dframe(self, dimensions=None, multi_index=False): @property def group(self): - "Group inherited from items" + """Group inherited from items + + """ if self._group: return self._group group = get_ndmapping_label(self, 'group') if len(self) else None @@ -946,7 +1020,9 @@ def group(self, group): @property def label(self): - "Label inherited from items" + """Label inherited from items + + """ if self._label: return self._label elif len(self): @@ -964,7 +1040,9 @@ def label(self, label): @property def type(self): - "The type of elements stored in the mapping." + """The type of elements stored in the mapping. + + """ if self._type is None and len(self): self._type = self.values()[0].__class__ return self._type diff --git a/holoviews/core/operation.py b/holoviews/core/operation.py index 8e0bde7b7d..dc2dec200a 100644 --- a/holoviews/core/operation.py +++ b/holoviews/core/operation.py @@ -1,6 +1,6 @@ -""" -Operations manipulate Elements, HoloMaps and Layouts, typically for +"""Operations manipulate Elements, HoloMaps and Layouts, typically for the purposes of analysis or visualization. + """ import param @@ -14,8 +14,7 @@ class Operation(param.ParameterizedFunction): - """ - An Operation process an Element or HoloMap at the level of + """An Operation process an Element or HoloMap at the level of individual elements or overlays. If a holomap is passed in as input, a processed holomap is returned as output where the individual elements have been transformed accordingly. An Operation may turn @@ -26,6 +25,7 @@ class Operation(param.ParameterizedFunction): dynamically. An Operation may also supply a list of Stream classes on a streams parameter, which can allow dynamic control over the parameters on the operation. + """ group = param.String(default='Operation', doc=""" @@ -84,12 +84,12 @@ class Operation(param.ParameterizedFunction): @classmethod def search(cls, element, pattern): - """ - Helper method that returns a list of elements that match the + """Helper method that returns a list of elements that match the given path pattern of form {type}.{group}.{label}. The input may be a Layout, an Overlay type or a single Element. + """ if isinstance(element, Layout): return [el for cell in element for el in cls.search(cell, pattern)] @@ -101,9 +101,9 @@ def search(cls, element, pattern): @classmethod def get_overlay_label(cls, overlay, default_label=''): - """ - Returns a label if all the elements of an overlay agree on a + """Returns a label if all the elements of an overlay agree on a consistent label, otherwise returns the default label. + """ if all(el.label==overlay.get(0).label for el in overlay): return overlay.get(0).label @@ -113,9 +113,9 @@ def get_overlay_label(cls, overlay, default_label=''): @classmethod def get_overlay_bounds(cls, overlay): - """ - Returns the extents if all the elements of an overlay agree on + """Returns the extents if all the elements of an overlay agree on a consistent extents, otherwise raises an exception. 
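UniformNdMapping's `collapse` and `dframe`, documented above, reduce or tabulate a stack of elements; a sketch averaging a stack of Curves (sample points must match across the stack):

    import numpy as np
    import holoviews as hv

    xs = np.arange(20)
    hmap = hv.HoloMap({run: hv.Curve((xs, np.random.randn(20).cumsum()))
                       for run in range(5)}, kdims=['run'])

    mean_curve = hmap.collapse(function=np.mean)                    # average the stack
    with_spread = hmap.collapse(function=np.mean, spreadfn=np.std)  # adds a spread estimate
    df = hmap.dframe(multi_index=True)                              # key dims as a MultiIndex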
+ """ if all(el.bounds==overlay.get(0).bounds for el in overlay): return overlay.get(0).bounds @@ -124,9 +124,9 @@ def get_overlay_bounds(cls, overlay): def _apply(self, element, key=None): - """ - Applies the operation to the element, executing any pre- and + """Applies the operation to the element, executing any pre- and post-processor hooks if defined. + """ kwargs = {} for hook in self._preprocess_hooks: @@ -162,19 +162,19 @@ def _apply(self, element, key=None): def _process(self, element, key=None): - """ - Process a single input element and outputs new single element + """Process a single input element and outputs new single element or overlay. If a HoloMap is passed into an Operation, the individual components are processed sequentially with the corresponding key passed as the optional key argument. + """ return element def process_element(self, element, key, **params): - """ - The process_element method allows a single element to be + """The process_element method allows a single element to be operated on given an externally supplied key. + """ if self._per_element and not isinstance(element, Element): return element.clone({k: self.process_element(el, key, **params) @@ -220,10 +220,10 @@ def __call__(self, element, **kwargs): class OperationCallable(Callable): - """ - OperationCallable allows wrapping an Operation and the objects it is + """OperationCallable allows wrapping an Operation and the objects it is processing to allow traversing the operations applied on a DynamicMap. + """ operation = param.ClassSelector(class_=Operation, doc=""" diff --git a/holoviews/core/options.py b/holoviews/core/options.py index db2ddf4c7d..029f343bd7 100644 --- a/holoviews/core/options.py +++ b/holoviews/core/options.py @@ -1,5 +1,4 @@ -""" -Options and OptionTrees allow different classes of options +"""Options and OptionTrees allow different classes of options (e.g. matplotlib-specific styles and plot specific parameters) to be defined separately from the core data structures and away from visualization specific code. @@ -49,10 +48,10 @@ def cleanup_custom_options(id, weakref=None): - """ - Cleans up unused custom trees if all objects referencing the + """Cleans up unused custom trees if all objects referencing the custom id have been garbage collected or tree is otherwise unreferenced. + """ try: if Store._options_context: @@ -82,9 +81,9 @@ def cleanup_custom_options(id, weakref=None): def lookup_options(obj, group, backend): - """ - Given a HoloViews object, a plot option group (e.g. 'style') and + """Given a HoloViews object, a plot option group (e.g. 'style') and backend, return the corresponding Options object. + """ plot_class = None try: @@ -103,28 +102,29 @@ def lookup_options(obj, group, backend): class CallbackError(RuntimeError): - """ - An error raised during a callback. + """An error raised during a callback. + """ class SkipRendering(Exception): - """ - A SkipRendering exception in the plotting code will make the display + """A SkipRendering exception in the plotting code will make the display hooks fall back to a text repr. Used to skip rendering of DynamicMaps with exhausted element generators. + """ + def __init__(self, message="", warn=True): self.warn = warn super().__init__(message) class OptionError(Exception): - """ - Custom exception raised when there is an attempt to apply invalid + """Custom exception raised when there is an attempt to apply invalid options. 
Stores the necessary information to construct a more readable message for the user if caught and processed appropriately. + """ def __init__(self, invalid_keyword, allowed_keywords, @@ -147,8 +147,8 @@ def message(self, invalid_keyword, allowed_keywords, group_name, path): return msg def format_options_error(self): - """ - Return a fuzzy match message based on the OptionError + """Return a fuzzy match message based on the OptionError + """ allowed_keywords = self.allowed_keywords target = allowed_keywords.target @@ -180,14 +180,15 @@ def format_options_error(self): class AbbreviatedException(Exception): - """ - Raised by the abbreviate_exception context manager when it is + """Raised by the abbreviate_exception context manager when it is appropriate to present an abbreviated the traceback and exception message in the notebook. Particularly useful when processing style options supplied by the user which may not be valid. + """ + def __init__(self, etype, value, traceback): self.etype = etype self.value = value @@ -202,18 +203,19 @@ def __str__(self): ) def print_traceback(self): - """ - Print the traceback of the exception wrapped by the AbbreviatedException. + """Print the traceback of the exception wrapped by the AbbreviatedException. + """ traceback.print_exception(self.etype, self.value, self.traceback) class abbreviated_exception: - """ - Context manager used to to abbreviate tracebacks using an + """Context manager used to to abbreviate tracebacks using an AbbreviatedException when a backend may raise an error due to incorrect style options. + """ + def __enter__(self): return self @@ -224,9 +226,9 @@ def __exit__(self, etype, value, traceback): @contextmanager def options_policy(skip_invalid, warn_on_skip): - """ - Context manager to temporarily set the skip_invalid and warn_on_skip + """Context manager to temporarily set the skip_invalid and warn_on_skip class parameters on Options. + """ settings = (Options.skip_invalid, Options.warn_on_skip) (Options.skip_invalid, Options.warn_on_skip) = (skip_invalid, warn_on_skip) @@ -237,8 +239,7 @@ class parameters on Options. class Keywords: - """ - A keywords objects represents a set of Python keywords. It is + """A keywords objects represents a set of Python keywords. It is list-like and ordered but it is also a set without duplicates. When passed as **kwargs, Python keywords are not ordered but this class always lists keywords in sorted order. @@ -248,6 +249,7 @@ class Keywords: This class is for internal use only and should not be in the user namespace. + """ def __init__(self, values=None, target=None): @@ -267,9 +269,9 @@ def __add__(self, other): return Keywords(sorted(set(self.values + other.values)), target=target) def fuzzy_match(self, kw): - """ - Given a string, fuzzy match against the Keyword values, + """Given a string, fuzzy match against the Keyword values, returning a list of close matches. + """ return difflib.get_close_matches(kw, self.values) @@ -290,12 +292,12 @@ def __contains__(self, val): return val in self.values class Cycle(param.Parameterized): - """ - A simple container class that specifies cyclic options. A typical + """A simple container class that specifies cyclic options. A typical example would be to cycle the curve colors in an Overlay composed of an arbitrary number of curves. The values may be supplied as an explicit list or a key to look up in the default cycles attribute. 
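A Cycle supplied as an explicit list, as described above, rotates style values across the elements of an Overlay; a sketch (assumes the bokeh backend for the `color` style option):

    import numpy as np
    import holoviews as hv
    from holoviews import Cycle, opts

    hv.extension('bokeh')

    overlay = hv.Overlay([hv.Curve(np.random.randn(10).cumsum()) for _ in range(3)])
    overlay.opts(opts.Curve(color=Cycle(['red', 'green', 'blue'])))  # colors cycle per curve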
+ """ key = param.String(default='default_colors', allow_None=True, doc=""" @@ -363,8 +365,7 @@ def grayscale(val): class Palette(Cycle): - """ - Palettes allow easy specifying a discrete sampling + """Palettes allow easy specifying a discrete sampling of an existing colormap. Palettes may be supplied a key to look up a function function in the colormap class attribute. The function should accept a float scalar @@ -374,6 +375,7 @@ class Palette(Cycle): The range and samples may conveniently be overridden with the __getitem__ method. + """ key = param.String(default='grayscale', doc=""" @@ -401,12 +403,12 @@ def __init__(self, key, **params): def __getitem__(self, slc): - """ - Provides a convenient interface to override the + """Provides a convenient interface to override the range and samples parameters of the Cycle. Supplying a slice step or index overrides the number of samples. Unsupplied slice values will be inherited. + """ (start, stop), step = self.range, self.samples if isinstance(slc, slice): @@ -430,14 +432,14 @@ def _get_values(self): class Options: - """ - An Options object holds a collection of keyword options. In + """An Options object holds a collection of keyword options. In addition, Options support (optional) keyword validation as well as infinite indexing over the set of supplied cyclic values. Options support inheritance of setting values via the __call__ method. By calling an Options object with additional keywords, you can create a new Options object inheriting the parent options. + """ # Whether all Options instances should skip invalid keywords or @@ -486,17 +488,17 @@ def __init__(self, key=None, allowed_keywords=None, merge_keywords=True, self.key = key def keywords_target(self, target): - """ - Helper method to easily set the target on the allowed_keywords Keywords. + """Helper method to easily set the target on the allowed_keywords Keywords. + """ self.allowed_keywords.target = target return self def filtered(self, allowed): - """ - Return a new Options object that is filtered by the specified + """Return a new Options object that is filtered by the specified list of keys. Mutating self.kwargs to filter is unsafe due to the option expansion that occurs on initialization. + """ kws = {k:v for k,v in self.kwargs.items() if k in allowed} return self.__class__(key=self.key, @@ -505,8 +507,8 @@ def filtered(self, allowed): def __call__(self, allowed_keywords=None, **kwargs): - """ - Create a new Options object that inherits the parent options. + """Create a new Options object that inherits the parent options. + """ if 'key' not in kwargs: kwargs['key'] = self.key @@ -515,14 +517,16 @@ def __call__(self, allowed_keywords=None, **kwargs): return self.__class__(**dict(self.kwargs, **inherited_style)) def keys(self): - "The keyword names across the supplied options." + """The keyword names across the supplied options. + + """ return sorted(list(self.kwargs.keys())) def max_cycles(self, num): - """ - Truncates all contained Palette objects to a maximum number + """Truncates all contained Palette objects to a maximum number of samples and returns a new Options object containing the truncated or resampled Palettes. 
+ """ kwargs = {kw: (arg[num] if isinstance(arg, Palette) else arg) for kw, arg in self.kwargs.items()} @@ -530,13 +534,15 @@ def max_cycles(self, num): @property def cyclic(self): - "Returns True if the options cycle, otherwise False" + """Returns True if the options cycle, otherwise False + + """ return any(isinstance(val, Cycle) for val in self.kwargs.values()) def __getitem__(self, index): - """ - Infinite cyclic indexing of options over the integers, + """Infinite cyclic indexing of options over the integers, looping over the set of defined Cycle objects. + """ if len(self.kwargs) == 0: return {} @@ -551,7 +557,9 @@ def __getitem__(self, index): @property def options(self): - "Access of the options keywords when no cycles are defined." + """Access of the options keywords when no cycles are defined. + + """ if not self.cyclic: return self[0] else: @@ -574,8 +582,7 @@ def __str__(self): class OptionTree(AttrTree): - """ - A subclass of AttrTree that is used to define the inheritance + """A subclass of AttrTree that is used to define the inheritance relationships between a collection of Options objects. Each node of the tree supports a group of Options objects and the leaf nodes inherit their keyword values from parent nodes up to the root. @@ -597,6 +604,7 @@ class OptionTree(AttrTree): the options specification. This acts as an alternative was of specifying the options groups of the current node. Note that this approach method may only be used with the group lists format. + """ def __init__(self, items=None, identifier=None, parent=None, @@ -625,10 +633,10 @@ def __init__(self, items=None, identifier=None, parent=None, StoreOptions.apply_customizations(options, self) def _merge_options(self, identifier, group_name, options): - """ - Computes a merged Options object for the given group + """Computes a merged Options object for the given group name from the existing Options on the node and the new Options which are passed in. + """ if group_name not in self.groups: raise KeyError(f"Group {group_name} not defined on SettingTree.") @@ -663,9 +671,9 @@ def __getitem__(self, item): return super().__getitem__(item) def __getattr__(self, identifier): - """ - Allows creating sub OptionTree instances using attribute + """Allows creating sub OptionTree instances using attribute access, inheriting the group options. + """ try: return super(AttrTree, self).__getattr__(identifier) @@ -730,11 +738,11 @@ def __setattr__(self, identifier, val): self[identifier].__setattr__(subtree.identifier, subtree) def find(self, path, mode='node'): - """ - Find the closest node or path to an the arbitrary path that is + """Find the closest node or path to an the arbitrary path that is supplied down the tree from the given node. The mode argument may be either 'node' or 'path' which determines the return type. + """ path = path.split('.') if isinstance(path, str) else list(path) item = self @@ -751,13 +759,13 @@ def find(self, path, mode='node'): return item if mode == 'node' else item.path def closest(self, obj, group, defaults=True, backend=None): - """ - This method is designed to be called from the root of the + """This method is designed to be called from the root of the tree. Given any LabelledData object, this method will return the most appropriate Options object, including inheritance. 
In addition, closest supports custom options by checking the object + """ opts_spec = ( obj.__class__.__name__, @@ -778,9 +786,9 @@ def closest(self, obj, group, defaults=True, backend=None): return options def options(self, group, target=None, defaults=True, backend=None): - """ - Using inheritance up to the root, get the complete Options + """Using inheritance up to the root, get the complete Options object for the given node and the specified group. + """ if target is None: target = self.path @@ -803,8 +811,8 @@ def options(self, group, target=None, defaults=True, backend=None): return Options(**dict(parent_opts.kwargs, **self.groups[group].kwargs)) def __repr__(self): - """ - Evalable representation of the OptionTree. + """Evalable representation of the OptionTree. + """ groups = self.__dict__['groups'] # Tab and group entry separators @@ -844,8 +852,7 @@ def __repr__(self): class Compositor(param.Parameterized): - """ - A Compositor is a way of specifying an operation to be automatically + """A Compositor is a way of specifying an operation to be automatically applied to Overlays that match a specified pattern upon display. Any Operation that takes an Overlay as input may be used to define a @@ -854,6 +861,7 @@ class Compositor(param.Parameterized): For instance, a compositor may be defined to automatically display three overlaid monochrome matrices as an RGB image as long as the values names of those matrices match 'R', 'G' and 'B'. + """ mode = param.Selector(default='data', @@ -897,12 +905,12 @@ class Compositor(param.Parameterized): @classmethod def strongest_match(cls, overlay, mode, backend=None): - """ - Returns the single strongest matching compositor operation + """Returns the single strongest matching compositor operation given an overlay. If no matches are found, None is returned. The best match is defined as the compositor operation with the highest match value as returned by the match_level method. + """ match_strength = [ (op.match_level(overlay), op) for op in cls.definitions @@ -918,8 +926,8 @@ def strongest_match(cls, overlay, mode, backend=None): @classmethod def collapse_element(cls, overlay, ranges=None, mode='data', backend=None): - """ - Finds any applicable compositor and applies it. + """Finds any applicable compositor and applies it. + """ from .element import Element from .overlay import CompositeOverlay, Overlay @@ -967,8 +975,8 @@ def collapse_element(cls, overlay, ranges=None, mode='data', backend=None): @classmethod def collapse(cls, holomap, ranges=None, mode='data'): - """ - Given a map of Overlays, apply all applicable compositors. + """Given a map of Overlays, apply all applicable compositors. + """ # No potential compositors if cls.definitions == []: @@ -983,9 +991,9 @@ def collapse(cls, holomap, ranges=None, mode='data'): @classmethod def map(cls, obj, mode='data', backend=None): - """ - Applies compositor operations to any HoloViews element or container + """Applies compositor operations to any HoloViews element or container using the map method. + """ from .overlay import CompositeOverlay element_compositors = [c for c in cls.definitions if len(c._pattern_spec) == 1] @@ -1038,16 +1046,16 @@ def __init__(self, pattern, operation, group, mode, transfer_options=False, @property def output_type(self): - """ - Returns the operation output_type unless explicitly overridden + """Returns the operation output_type unless explicitly overridden in the kwargs. 
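To illustrate the Compositor registration described above, a hedged sketch using an invented sum_curves operation; the pattern, group name and operation are all hypothetical:

    import param
    import holoviews as hv
    from holoviews.core.operation import Operation
    from holoviews.core.options import Compositor

    class sum_curves(Operation):
        """Hypothetical operation collapsing two overlaid Curves into their sum."""

        output_type = hv.Curve

        def _process(self, overlay, key=None):
            a, b = overlay.get(0), overlay.get(1)
            ys = a.dimension_values(1) + b.dimension_values(1)
            return a.clone((a.dimension_values(0), ys))

    # Any displayed Overlay matching the pattern is collapsed automatically
    Compositor.register(Compositor("Curve * Curve", sum_curves, 'Sum', mode='data'))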
+ """ return self._output_type or self.operation.output_type def _slice_match_level(self, overlay_items): - """ - Find the match strength for a list of overlay items that must + """Find the match strength for a list of overlay items that must be exactly the same length as the pattern specification. + """ level = 0 for spec, el in zip(self._pattern_spec, overlay_items): @@ -1069,13 +1077,13 @@ def _slice_match_level(self, overlay_items): return level def match_level(self, overlay): - """ - Given an overlay, return the match level and applicable slice + """Given an overlay, return the match level and applicable slice of the overall overlay. The level an integer if there is a match or None if there is no match. The level integer is the number of matching components. Higher values indicate a stronger match. + """ slice_width = len(self._pattern_spec) if slice_width > len(overlay): return None @@ -1093,8 +1101,8 @@ def match_level(self, overlay): return (best_lvl, match_slice) if best_lvl != 0 else None def apply(self, value, input_ranges, backend=None): - """ - Apply the compositor on the input with the given input ranges. + """Apply the compositor on the input with the given input ranges. + """ from .overlay import CompositeOverlay if backend is None: backend = Store.current_backend @@ -1113,14 +1121,14 @@ def apply(self, value, input_ranges, backend=None): class Store: - """ - The Store is what links up HoloViews objects to their + """The Store is what links up HoloViews objects to their corresponding options and to the appropriate classes of the chosen backend (e.g. for rendering). In addition, Store supports pickle operations that automatically pickle and unpickle the corresponding options for a HoloViews object. + """ renderers = {} # The set of available Renderers across all backends. @@ -1161,7 +1169,9 @@ class Store: @classmethod def set_current_backend(cls, backend): - "Use this method to set the backend to run the switch hooks" + """Use this method to set the backend to run the switch hooks + + """ for hook in cls._backend_switch_hooks: hook(backend) cls.current_backend = backend @@ -1177,9 +1187,9 @@ def options(cls, backend=None, val=None): @classmethod def loaded_backends(cls): - """ - Returns a list of the backends that have been loaded, based on + """Returns a list of the backends that have been loaded, based on the available OptionTrees. + """ return sorted(cls._options.keys()) @@ -1193,9 +1203,9 @@ def custom_options(cls, val=None, backend=None): @classmethod def load(cls, filename): - """ - Equivalent to pickle.load except that the HoloViews trees is + """Equivalent to pickle.load except that the HoloViews trees is restored appropriately. + """ cls.load_counter_offset = StoreOptions.id_offset() val = pickle.load(filename) @@ -1204,9 +1214,9 @@ def load(cls, filename): @classmethod def loads(cls, pickle_string): - """ - Equivalent to pickle.loads except that the HoloViews trees is + """Equivalent to pickle.loads except that the HoloViews trees is restored appropriately. + """ cls.load_counter_offset = StoreOptions.id_offset() val = pickle.loads(pickle_string) @@ -1215,9 +1225,9 @@ def loads(cls, pickle_string): @classmethod def dump(cls, obj, file, protocol=0): - """ - Equivalent to pickle.dump except that the HoloViews option + """Equivalent to pickle.dump except that the HoloViews option tree is saved appropriately. 
+ """ cls.save_option_state = True pickle.dump(obj, file, protocol=protocol) @@ -1225,9 +1235,9 @@ def dump(cls, obj, file, protocol=0): @classmethod def dumps(cls, obj, protocol=0): - """ - Equivalent to pickle.dumps except that the HoloViews option + """Equivalent to pickle.dumps except that the HoloViews option tree is saved appropriately. + """ cls.save_option_state = True val = pickle.dumps(obj, protocol=protocol) @@ -1237,10 +1247,10 @@ def dumps(cls, obj, protocol=0): @classmethod def info(cls, obj, ansi=True, backend='matplotlib', visualization=True, recursive=False, pattern=None, elements=None): - """ - Show information about a particular object or component class + """Show information about a particular object or component class including the applicable style and plot options. Returns None if the object is not parameterized. + """ if elements is None: elements = [] @@ -1279,9 +1289,9 @@ def lookup_options(cls, backend, obj, group, defaults=True): @classmethod def lookup(cls, backend, obj): - """ - Given an object, lookup the corresponding customized option + """Given an object, lookup the corresponding customized option tree if a single custom tree is applicable. + """ ids = {el for el in obj.traverse(lambda x: x.id) if el is not None} if len(ids) == 0: @@ -1294,9 +1304,9 @@ def lookup(cls, backend, obj): @classmethod def transfer_options(cls, obj, new_obj, backend=None, names=None, level=3): - """ - Transfers options for all backends from one object to another. + """Transfers options for all backends from one object to another. Drops any options defined in the supplied drop list. + """ if obj is new_obj: return @@ -1320,14 +1330,14 @@ def transfer_options(cls, obj, new_obj, backend=None, names=None, level=3): @classmethod def add_style_opts(cls, component, new_options, backend=None): - """ - Given a component such as an Element (e.g. Image, Curve) or a + """Given a component such as an Element (e.g. Image, Curve) or a container (e.g. Layout) specify new style options to be accepted by the corresponding plotting class. - Note: This is supplied for advanced users who know which + Note : This is supplied for advanced users who know which additional style keywords are appropriate for the corresponding plotting class. + """ backend = cls.current_backend if backend is None else backend if component not in cls.registry[backend]: @@ -1351,9 +1361,9 @@ def add_style_opts(cls, component, new_options, backend=None): @classmethod def register(cls, associations, backend, style_aliases=None): - """ - Register the supplied dictionary of associations between + """Register the supplied dictionary of associations between elements and plotting classes to the specified backend. + """ if style_aliases is None: style_aliases = {} @@ -1396,19 +1406,19 @@ def register(cls, associations, backend, style_aliases=None): @classmethod def set_display_hook(cls, group, objtype, hook): - """ - Specify a display hook that will be applied to objects of type + """Specify a display hook that will be applied to objects of type objtype. The group specifies the set to which the display hook belongs, allowing the Store to compute the precedence within each group. + """ cls._display_hooks[group][objtype] = hook @classmethod def render(cls, obj): - """ - Using any display hooks that have been registered, render the + """Using any display hooks that have been registered, render the object to a dictionary of MIME types and metadata information. 
+ """ class_hierarchy = inspect.getmro(type(obj)) hooks = [] @@ -1430,8 +1440,7 @@ def render(cls, obj): class StoreOptions: - """ - A collection of utilities for advanced users for creating and + """A collection of utilities for advanced users for creating and setting customized option trees on the Store. Designed for use by either advanced users or the %opts line and cell magics which use this machinery. @@ -1445,6 +1454,7 @@ class StoreOptions: Lastly this class offers a means to record all OptionErrors generated by an option specification. This is used for validation purposes. + """ #=======================# @@ -1455,17 +1465,17 @@ class StoreOptions: @classmethod def start_recording_skipped(cls): - """ - Start collecting OptionErrors for all skipped options recorded + """Start collecting OptionErrors for all skipped options recorded with the record_skipped_option method + """ cls._errors_recorded = [] @classmethod def stop_recording_skipped(cls): - """ - Stop collecting OptionErrors recorded with the + """Stop collecting OptionErrors recorded with the record_skipped_option method and return them + """ if cls._errors_recorded is None: raise Exception('Cannot stop recording before it is started') @@ -1475,9 +1485,9 @@ def stop_recording_skipped(cls): @classmethod def record_skipped_option(cls, error): - """ - Record the OptionError associated with a skipped option if + """Record the OptionError associated with a skipped option if currently recording + """ if cls._errors_recorded is not None: cls._errors_recorded.append(error) @@ -1493,8 +1503,8 @@ def get_object_ids(cls, obj): @classmethod def tree_to_dict(cls, tree): - """ - Given an OptionTree, convert it into the equivalent dictionary format. + """Given an OptionTree, convert it into the equivalent dictionary format. + """ specs = {} for k in tree.keys(): @@ -1508,10 +1518,10 @@ def tree_to_dict(cls, tree): @classmethod def propagate_ids(cls, obj, match_id, new_id, applied_keys, backend=None): - """ - Recursively propagate an id through an object for components + """Recursively propagate an id through an object for components matching the applied_keys. This method can only be called if there is a tree with a matching id in Store.custom_options + """ applied = [] def propagate(o): @@ -1528,26 +1538,26 @@ def propagate(o): @classmethod def capture_ids(cls, obj): - """ - Given an list of ids, capture a list of ids that can be + """Given an list of ids, capture a list of ids that can be restored using the restore_ids. + """ return obj.traverse(lambda o: o.id) @classmethod def restore_ids(cls, obj, ids): - """ - Given an list of ids as captured with capture_ids, restore the + """Given an list of ids as captured with capture_ids, restore the ids. Note the structure of an object must not change between the calls to capture_ids and restore_ids. + """ ids = iter(ids) obj.traverse(lambda o: setattr(o, 'id', next(ids))) @classmethod def apply_customizations(cls, spec, options): - """ - Apply the given option specs to the supplied options tree. + """Apply the given option specs to the supplied options tree. + """ for key in sorted(spec.keys()): if isinstance(spec[key], (list, tuple)): @@ -1564,14 +1574,14 @@ def apply_customizations(cls, spec, options): @classmethod def validate_spec(cls, spec, backends=None): - """ - Given a specification, validated it against the options tree for + """Given a specification, validated it against the options tree for the specified backends by raising OptionError for invalid options. 
If backends is None, validates against all the currently loaded backend. Only useful when invalid keywords generate exceptions instead of skipping, i.e. Options.skip_invalid is False. + """ loaded_backends = Store.loaded_backends() if backends is None else backends @@ -1605,9 +1615,9 @@ def validate_spec(cls, spec, backends=None): @classmethod def validation_error_message(cls, spec, backends=None): - """ - Returns an options validation error message if there are any + """Returns an options validation error message if there are any invalid keywords. Otherwise returns None. + """ try: cls.validate_spec(spec, backends=backends) @@ -1616,10 +1626,10 @@ def validation_error_message(cls, spec, backends=None): @classmethod def expand_compositor_keys(cls, spec): - """ - Expands compositor definition keys into {type}.{group} + """Expands compositor definition keys into {type}.{group} keys. For instance a compositor operation returning a group string 'Image' of element type RGB expands to 'RGB.Image'. + """ expanded_spec = {} applied_keys = [] @@ -1638,14 +1648,14 @@ def expand_compositor_keys(cls, spec): @classmethod def create_custom_trees(cls, obj, options=None, backend=None): - """ - Returns the appropriate set of customized subtree clones for + """Returns the appropriate set of customized subtree clones for an object, suitable for merging with Store.custom_options (i.e with the ids appropriately offset). Note if an object has no integer ids a new OptionTree is built. The id_mapping return value is a list mapping the ids that need to be matched as set to their new values. + """ clones, id_mapping = {}, [] obj_ids = cls.get_object_ids(obj) @@ -1693,8 +1703,7 @@ def create_custom_trees(cls, obj, options=None, backend=None): @classmethod def merge_options(cls, groups, options=None,**kwargs): - """ - Given a full options dictionary and options groups specified + """Given a full options dictionary and options groups specified as a keywords, return the full set of merged options: >>> options={'Curve':{'style':dict(color='b')}} @@ -1702,6 +1711,7 @@ def merge_options(cls, groups, options=None,**kwargs): >>> merged = StoreOptions.merge_options(['style'], options, style=style) >>> sorted(merged['Curve']['style'].items()) [('color', 'b'), ('linewidth', 10)] + """ groups = set(groups) if (options is not None and set(options.keys()) <= groups): @@ -1731,11 +1741,11 @@ def merge_options(cls, groups, options=None,**kwargs): @classmethod def state(cls, obj, state=None): - """ - Method to capture and restore option state. When called + """Method to capture and restore option state. When called without any state supplied, the current state is returned. Then if this state is supplied back in a later call using the same object, the original state is restored. + """ if state is None: ids = cls.capture_ids(obj) @@ -1751,8 +1761,7 @@ def state(cls, obj, state=None): @classmethod @contextmanager def options(cls, obj, options=None, **kwargs): - """ - Context-manager for temporarily setting options on an object + """Context-manager for temporarily setting options on an object (if options is None, no options will be set) . Once the context manager exits, both the object and the Store will be left in exactly the same state they were in before the context @@ -1760,6 +1769,7 @@ def options(cls, obj, options=None, **kwargs): See holoviews.core.options.set_options function for more information on the options specification format. 
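To ground the options context manager documented above, a hedged sketch; the spec format follows the set_options docstring further down, the output filename is arbitrary, and the matplotlib backend is assumed:

    import numpy as np
    import holoviews as hv
    from holoviews.core.options import StoreOptions
    hv.extension('matplotlib')

    img = hv.Image(np.random.rand(10, 10))
    spec = {'Image': {'style': dict(cmap='viridis'), 'plot': dict(colorbar=True)}}

    with StoreOptions.options(img, options=spec):
        hv.renderer('matplotlib').save(img, 'snapshot')   # rendered with the temporary options
    # on exit, both img and the Store are back in their previous state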
+ """ if (options is not None) or kwargs: Store._options_context = True @@ -1777,9 +1787,9 @@ def options(cls, obj, options=None, **kwargs): @classmethod def id_offset(cls): - """ - Compute an appropriate offset for future id values given the set + """Compute an appropriate offset for future id values given the set of ids currently defined across backends. + """ max_ids = [] for backend in Store.renderers.keys(): @@ -1793,11 +1803,11 @@ def id_offset(cls): @classmethod def update_backends(cls, id_mapping, custom_trees, backend=None): - """ - Given the id_mapping from previous ids to new ids and the new + """Given the id_mapping from previous ids to new ids and the new custom tree dictionary, update the current backend with the supplied trees and update the keys in the remaining backends to stay linked with the current object. + """ backend = Store.current_backend if backend is None else backend # Update the custom option entries for the current backend @@ -1816,8 +1826,7 @@ def update_backends(cls, id_mapping, custom_trees, backend=None): @classmethod def set_options(cls, obj, options=None, backend=None, **kwargs): - """ - Pure Python function for customize HoloViews objects in terms of + """Pure Python function for customize HoloViews objects in terms of their style, plot and normalization options. The options specification is a dictionary containing the target @@ -1852,6 +1861,7 @@ def set_options(cls, obj, options=None, backend=None, **kwargs): Then setting both plot and style options: set_options(my_image, OptsSpec.parse("Image [size=50] (cmap='Blues')")) + """ # Note that an alternate, more verbose and less recommended # syntax can also be used: diff --git a/holoviews/core/overlay.py b/holoviews/core/overlay.py index cb18975ade..a4f977b824 100644 --- a/holoviews/core/overlay.py +++ b/holoviews/core/overlay.py @@ -1,5 +1,4 @@ -""" -Supplies Layer and related classes that allow overlaying of Views, +"""Supplies Layer and related classes that allow overlaying of Views, including Overlay. A Layer is the final extension of View base class that allows Views to be overlaid on top of each other. @@ -19,12 +18,15 @@ class Overlayable: - """ - Overlayable provides a mix-in class to support the + """Overlayable provides a mix-in class to support the mul operation for overlaying multiple elements. + """ + def __mul__(self, other): - "Overlay object with other object." + """Overlay object with other object. + + """ # Local import to break the import cyclic dependency from .spaces import DynamicMap @@ -54,8 +56,8 @@ def dynamic_mul(*args, **kwargs): class CompositeOverlay(ViewableElement, Composable): - """ - CompositeOverlay provides a common baseclass for Overlay classes. + """CompositeOverlay provides a common baseclass for Overlay classes. + """ _deep_indexable = True @@ -67,19 +69,27 @@ def hist(self, dimension=None, num_bins=20, bin_range=None, Defaults to first value dimension if present otherwise falls back to first key dimension. - Args: - dimension: Dimension(s) to compute histogram on, - Falls back the plot dimensions by default. - num_bins (int, optional): Number of bins - bin_range (tuple optional): Lower and upper bounds of bins - adjoin (bool, optional): Whether to adjoin histogram - index (int, optional): Index of layer to apply hist to - show_legend (bool, optional): Show legend in histogram - (don't show legend by default). 
- - Returns: - AdjointLayout of element and histogram or just the - histogram + Parameters + ---------- + dimension + Dimension(s) to compute histogram on, + Falls back the plot dimensions by default. + num_bins : int, optional + Number of bins + bin_range : tuple, optional + Lower and upper bounds of bins + adjoin : bool, optional + Whether to adjoin histogram + index : int, optional + Index of layer to apply hist to + show_legend : bool, optional + Show legend in histogram + (don't show legend by default). + + Returns + ------- + AdjointLayout of element and histogram or just the + histogram """ # Get main layer to get plot dimensions main_layer_int_index = getattr(self, "main_layer", None) or 0 @@ -131,18 +141,23 @@ def hist(self, dimension=None, num_bins=20, bin_range=None, def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar: If false returns unique values + * Geometry: If false returns scalar values per geometry + * Gridded: If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ values = [] found = False @@ -160,8 +175,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): class Overlay(ViewableTree, CompositeOverlay, Layoutable, Overlayable): - """ - An Overlay consists of multiple Elements (potentially of + """An Overlay consists of multiple Elements (potentially of heterogeneous type) presented one on top each other with a particular z-ordering. @@ -169,6 +183,7 @@ class Overlay(ViewableTree, CompositeOverlay, Layoutable, Overlayable): a Layout and in fact extend the Layout structure. Overlays are constructed using the * operator (building an identical structure to the + operator). + """ def __init__(self, items=None, group=None, label=None, **params): @@ -178,9 +193,9 @@ def __init__(self, items=None, group=None, label=None, **params): super().__init__(items, **params) def __getitem__(self, key): - """ - Allows transparently slicing the Elements in the Overlay + """Allows transparently slicing the Elements in the Overlay to select specific layers in an Overlay use the .get method. + """ return Overlay([(k, v[key]) for k, v in self.items()]) @@ -191,12 +206,16 @@ def get(self, identifier, default=None): Get a particular layer in the Overlay using its path string or an integer index. 
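A hedged example of building and indexing an Overlay as documented above; the 'Curve.I' path string assumes default group/label handling and the histogram options are illustrative:

    import numpy as np
    import holoviews as hv
    hv.extension('bokeh')

    points = hv.Points(np.random.randn(500, 2))
    curve = hv.Curve([(i, i ** 2) for i in range(10)])

    overlay = points * curve               # Overlays are built with the * operator
    overlay.get(0)                         # first layer, by integer index
    overlay.get('Curve.I', default=None)   # or by path string
    overlay.hist(dimension='x', index=0)   # adjoin a histogram computed on the first layer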
- Args: - identifier: Index or path string of the item to return - default: Value to return if no item is found + Parameters + ---------- + identifier + Index or path string of the item to return + default + Value to return if no item is found - Returns: - The indexed layer of the Overlay + Returns + ------- + The indexed layer of the Overlay """ if isinstance(identifier, int): values = list(self.data.values()) @@ -207,9 +226,9 @@ def get(self, identifier, default=None): return super().get(identifier, default) def collate(self): - """ - Collates any objects in the Overlay resolving any issues + """Collates any objects in the Overlay resolving any issues the recommended nesting structure. + """ return reduce(lambda x,y: x*y, self.values()) @@ -225,8 +244,9 @@ def decollate(self): positional_stream_args=True, and the callback function accepts stream values as positional dict arguments. - Returns: - DynamicMap that returns an Overlay + Returns + ------- + DynamicMap that returns an Overlay """ from .decollate import decollate return decollate(self) @@ -294,10 +314,10 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, **overrid class NdOverlay(Overlayable, UniformNdMapping, CompositeOverlay): - """ - An NdOverlay allows a group of NdOverlay to be overlaid together. NdOverlay can + """An NdOverlay allows a group of NdOverlay to be overlaid together. NdOverlay can be indexed out of an overlay and an overlay is an iterable that iterates over the contained layers. + """ kdims = param.List(default=[Dimension('Element')], constant=True, doc=""" @@ -321,8 +341,9 @@ def decollate(self): positional_stream_args=True, and the callback function accepts stream values as positional dict arguments. - Returns: - DynamicMap that returns an NdOverlay + Returns + ------- + DynamicMap that returns an NdOverlay """ from .decollate import decollate return decollate(self) diff --git a/holoviews/core/pprint.py b/holoviews/core/pprint.py index 1475819ae9..e2211e823d 100644 --- a/holoviews/core/pprint.py +++ b/holoviews/core/pprint.py @@ -1,5 +1,4 @@ -""" -HoloViews can be used to build highly-nested data-structures +"""HoloViews can be used to build highly-nested data-structures containing large amounts of raw data. As a result, it is difficult to generate a readable representation that is both informative yet concise. @@ -9,6 +8,7 @@ far too large to be practical. Instead, all HoloViews objects can be represented as tree structures, showing how to access and index into your data. + """ import re @@ -22,8 +22,7 @@ class ParamFilter(param.ParameterizedFunction): - """ - Given a parameterized object, return a proxy parameterized object + """Given a parameterized object, return a proxy parameterized object holding only the parameters that match some filter criterion. A filter is supplied with the parameter name and the parameter @@ -34,6 +33,7 @@ class ParamFilter(param.ParameterizedFunction): filtered = ParamFilter(obj, ParamFilter.regexp_filter('bounds')) This may be used to filter documentation generated by param. 
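Essentially the ParamFilter usage from the docstring above, made self-contained; hv.Image is just a convenient Parameterized class to filter:

    import holoviews as hv
    from holoviews.core.pprint import ParamFilter

    # Proxy of hv.Image exposing only parameters whose name or docstring mentions 'bounds'
    filtered = ParamFilter(hv.Image, ParamFilter.regexp_filter('bounds'))
    help(filtered)   # the generated documentation now lists only those parameters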
+ """ def __call__(self, obj, filter_fn=None): @@ -58,9 +58,9 @@ def __call__(self, obj, filter_fn=None): @param.parameterized.bothmethod def regexp_filter(self_or_cls, pattern): - """ - Builds a parameter filter using the supplied pattern (may be a + """Builds a parameter filter using the supplied pattern (may be a general Python regular expression) + """ def inner_filter(name, p): name_match = re.search(pattern,name) @@ -75,10 +75,11 @@ def inner_filter(name, p): class InfoPrinter: - """ - Class for printing other information related to an object that is + """Class for printing other information related to an object that is of use to the user. + """ + headings = ['\x1b[1;35m%s\x1b[0m', '\x1b[1;32m%s\x1b[0m'] ansi_escape = re.compile(r'\x1b[^m]*m') ppager = ParamPager() @@ -88,8 +89,8 @@ class InfoPrinter: @classmethod def get_parameter_info(cls, obj, ansi=False, show_values=True, pattern=None, max_col_len=40): - """ - Get parameter information from the supplied class or object. + """Get parameter information from the supplied class or object. + """ if cls.ppager is None: return '' if pattern is not None: @@ -109,9 +110,9 @@ def get_parameter_info(cls, obj, ansi=False, show_values=True, @classmethod def heading(cls, heading_text, char='=', level=0, ansi=False): - """ - Turn the supplied heading text into a suitable heading with + """Turn the supplied heading text into a suitable heading with optional underline and color. + """ heading_color = cls.headings[level] if ansi else '%s' if char is None: @@ -131,9 +132,9 @@ def highlight(cls, pattern, string): @classmethod def info(cls, obj, ansi=False, backend='matplotlib', visualization=True, pattern=None, elements=None): - """ - Show information about an object in the given category. ANSI + """Show information about an object in the given category. ANSI color codes may be enabled or disabled. + """ if elements is None: elements = [] @@ -254,9 +255,9 @@ def options_info(cls, plot_class, ansi=False, pattern=None): class PrettyPrinter(param.Parameterized): - """ - The PrettyPrinter used to print all HoloView objects via the + """The PrettyPrinter used to print all HoloView objects via the pprint method. + """ show_defaults = param.Boolean(default=False, doc=""" @@ -291,15 +292,17 @@ def padding(cls_or_slf, items): @bothmethod def component_type(cls_or_slf, node): - "Return the type.group.label dotted information" + """Return the type.group.label dotted information + + """ if node is None: return '' return cls_or_slf.type_formatter.format(type=str(type(node).__name__)) @bothmethod def recurse(cls_or_slf, node, attrpath=None, attrpaths=None, siblings=None, level=0, value_dims=True): - """ - Recursive function that builds up an ASCII tree given an + """Recursive function that builds up an ASCII tree given an AttrTree node. + """ if siblings is None: siblings = [] @@ -315,8 +318,8 @@ def recurse(cls_or_slf, node, attrpath=None, attrpaths=None, siblings=None, leve @bothmethod def node_info(cls_or_slf, node, attrpath, attrpaths, siblings, level, value_dims): - """ - Given a node, return relevant information. + """Given a node, return relevant information. + """ opts = None if hasattr(node, 'children'): @@ -347,9 +350,9 @@ def node_info(cls_or_slf, node, attrpath, attrpaths, siblings, level, value_dims @bothmethod def element_info(cls_or_slf, node, siblings, level, value_dims): - """ - Return the information summary for an Element. This consists + """Return the information summary for an Element. 
This consists of the dotted name followed by an value dimension names. + """ info = cls_or_slf.component_type(node) if len(node.kdims) >= 1: diff --git a/holoviews/core/sheetcoords.py b/holoviews/core/sheetcoords.py index 690a55d709..1208d83157 100644 --- a/holoviews/core/sheetcoords.py +++ b/holoviews/core/sheetcoords.py @@ -1,5 +1,4 @@ -""" -File originally part of the Topographica project. Provides +"""File originally part of the Topographica project. Provides SheetCoordinateSystem, allowing conversion between continuous 'sheet coordinates' and integer matrix coordinates. @@ -73,6 +72,7 @@ Of course, it would be an error to try to pass matrix coordinates like [0,2] to the sheet2matrix calls; the result would be a value far outside of the actual matrix. + """ import numpy as np @@ -116,10 +116,11 @@ class SheetCoordinateSystem: - """ - Provides methods to allow conversion between sheet and matrix + """Provides methods to allow conversion between sheet and matrix coordinates. + """ + def __get_xdensity(self): return self.__xdensity def __get_ydensity(self): @@ -142,8 +143,7 @@ def __get_shape(self): _time_unit = 'us' def __init__(self,bounds,xdensity,ydensity=None): - """ - Store the bounds (as l,b,r,t in an array), xdensity, and + """Store the bounds (as l,b,r,t in an array), xdensity, and ydensity. If ydensity is not specified, it is assumed that the specified @@ -153,6 +153,7 @@ def __init__(self,bounds,xdensity,ydensity=None): If both xdensity and ydensity are specified, these and the bounds are taken to be exact and are not adjusted. + """ if not ydensity: bounds,xdensity = self.__equalize_densities(bounds,xdensity) @@ -178,11 +179,11 @@ def __set_ydensity(self,density): def __equalize_densities(self,nominal_bounds,nominal_density): - """ - Calculate the true density along x, and adjust the top and + """Calculate the true density along x, and adjust the top and bottom bounds so that the density along y will be equal. Returns (adjusted_bounds, true_density) + """ left,bottom,right,top = nominal_bounds.lbrt() width, height = right-left, top-bottom @@ -200,8 +201,7 @@ def __equalize_densities(self,nominal_bounds,nominal_density): def sheet2matrix(self,x,y): - """ - Convert a point (x,y) in Sheet coordinates to continuous + """Convert a point (x,y) in Sheet coordinates to continuous matrix coordinates. Returns (float_row,float_col), where float_row corresponds to @@ -218,6 +218,7 @@ def sheet2matrix(self,x,y): is outside. Similarly, y=0.5 is inside (at row 0) but y=-0.5 is outside (at row 3) (it's the other way round for y because the matrix row index increases as y decreases). + """ # First translate to (left,top), which is [0,0] in the matrix, # then scale to the size of the matrix. The y coordinate needs @@ -243,8 +244,7 @@ def sheet2matrix(self,x,y): def sheet2matrixidx(self,x,y): - """ - Convert a point (x,y) in sheet coordinates to the integer row + """Convert a point (x,y) in sheet coordinates to the integer row and column index of the matrix cell in which that point falls, given a bounds and density. Returns (row,column). @@ -254,6 +254,7 @@ def sheet2matrixidx(self,x,y): right and bottom boundaries are exclusive. Valid for scalar or array x and y. 
+ """ r,c = self.sheet2matrix(x,y) r = np.floor(r) @@ -266,14 +267,14 @@ def sheet2matrixidx(self,x,y): def matrix2sheet(self,float_row,float_col): - """ - Convert a floating-point location (float_row,float_col) in + """Convert a floating-point location (float_row,float_col) in matrix coordinates to its corresponding location (x,y) in sheet coordinates. Valid for scalar or array float_row and float_col. Inverse of sheet2matrix(). + """ xoffset = float_col*self.__xstep if isinstance(self.lbrt[0], datetime_types): @@ -287,17 +288,19 @@ def matrix2sheet(self,float_row,float_col): def matrixidx2sheet(self,row,col): - """ - Return (x,y) where x and y are the floating point coordinates + """Return (x,y) where x and y are the floating point coordinates of the *center* of the given matrix cell (row,col). If the matrix cell represents a 0.2 by 0.2 region, then the center location returned would be 0.1,0.1. - NOTE: This is NOT the strict mathematical inverse of + NOTE + ---- + This is NOT the strict mathematical inverse of sheet2matrixidx(), because sheet2matrixidx() discards all but the integer portion of the continuous matrix coordinate. Valid only for scalar or array row and col. + """ x,y = self.matrix2sheet((row+0.5), (col+0.5)) @@ -310,18 +313,18 @@ def matrixidx2sheet(self,row,col): def closest_cell_center(self,x,y): - """ - Given arbitrary sheet coordinates, return the sheet coordinates + """Given arbitrary sheet coordinates, return the sheet coordinates of the center of the closest unit. + """ return self.matrixidx2sheet(*self.sheet2matrixidx(x,y)) def sheetcoordinates_of_matrixidx(self): - """ - Return x,y where x is a vector of sheet coordinates + """Return x,y where x is a vector of sheet coordinates representing the x-center of each matrix cell, and y represents the corresponding y-center of the cell. + """ rows,cols = self.shape return self.matrixidx2sheet(np.arange(rows), np.arange(cols)) @@ -329,8 +332,7 @@ def sheetcoordinates_of_matrixidx(self): class Slice(np.ndarray): - """ - Represents a slice of a SheetCoordinateSystem; i.e., an array + """Represents a slice of a SheetCoordinateSystem; i.e., an array specifying the row and column start and end points for a submatrix of the SheetCoordinateSystem. @@ -344,6 +346,7 @@ class Slice(np.ndarray): SheetCoordinateSystem, and that actions such as translate() also do not respect the bounds. To ensure that the slice is within the SheetCoordinateSystem's bounds, use crop_to_sheet(). + """ __slots__ = [] @@ -355,9 +358,9 @@ def compute_bounds(self,scs): def __new__(cls, bounds, sheet_coordinate_system, force_odd=False, min_matrix_radius=1): - """ - Create a slice of the given sheet_coordinate_system from the + """Create a slice of the given sheet_coordinate_system from the specified bounds. + """ if force_odd: slicespec=Slice._createoddslicespec(bounds,sheet_coordinate_system, @@ -370,8 +373,7 @@ def __new__(cls, bounds, sheet_coordinate_system, force_odd=False, def submatrix(self,matrix): - """ - Return the submatrix of the given matrix specified by this + """Return the submatrix of the given matrix specified by this slice. Equivalent to computing the intersection between the @@ -380,14 +382,15 @@ def submatrix(self,matrix): The submatrix is just a view into the sheet_matrix; it is not an independent copy. 
+ """ return matrix[self[0]:self[1],self[2]:self[3]] @staticmethod def findinputslice(coord, sliceshape, sheetshape): - """ - Gets the matrix indices of a slice within an array of size + """Gets the matrix indices of a slice within an array of size sheetshape from a sliceshape, positioned at coord. + """ center_row, center_col = coord n_rows, n_cols = sliceshape @@ -402,11 +405,11 @@ def findinputslice(coord, sliceshape, sheetshape): def positionlesscrop(self,x,y,sheet_coord_system): - """ - Return the correct slice for a weights/mask matrix at this + """Return the correct slice for a weights/mask matrix at this ConnectionField's location on the sheet (i.e. for getting the correct submatrix of the weights or mask in case the unit is near the edge of the sheet). + """ slice_inds = self.findinputslice( sheet_coord_system.sheet2matrixidx(x,y), @@ -416,11 +419,11 @@ def positionlesscrop(self,x,y,sheet_coord_system): def positionedcrop(self,x,y,sheet_coord_system): - """ - Offset the bounds_template to this cf's location and store the + """Offset the bounds_template to this cf's location and store the result in the 'bounds' attribute. Also stores the input_sheet_slice for access by C. + """ cf_row,cf_col = sheet_coord_system.sheet2matrixidx(x,y) bounds_x,bounds_y=self.compute_bounds(sheet_coord_system).centroid() @@ -433,22 +436,30 @@ def positionedcrop(self,x,y,sheet_coord_system): def translate(self, r, c): - "Translate the slice by the given number of rows and columns." + """Translate the slice by the given number of rows and columns. + + """ self+=[r,r,c,c] def set(self,slice_specification): - "Set this slice from some iterable that specifies (r1,r2,c1,c2)." + """Set this slice from some iterable that specifies (r1,r2,c1,c2). + + """ self.put([0,1,2,3],slice_specification) # pylint: disable-msg=E1101 def shape_on_sheet(self): - "Return the shape of the array of the Slice on its sheet." + """Return the shape of the array of the Slice on its sheet. + + """ return self[1]-self[0],self[3]-self[2] def crop_to_sheet(self,sheet_coord_system): - "Crop the slice to the SheetCoordinateSystem's bounds." + """Crop the slice to the SheetCoordinateSystem's bounds. + + """ maxrow,maxcol = sheet_coord_system.shape self[0] = max(0,self[0]) @@ -459,8 +470,7 @@ def crop_to_sheet(self,sheet_coord_system): @staticmethod def _createoddslicespec(bounds,scs,min_matrix_radius): - """ - Create the 'odd' Slice that best approximates the specified + """Create the 'odd' Slice that best approximates the specified sheet-coordinate bounds. The supplied bounds are translated to have a center at the @@ -476,6 +486,7 @@ def _createoddslicespec(bounds,scs,min_matrix_radius): through units, if the units are included on the right and bottom bounds, they will be included on the left and top bounds. This ensures that the slice has odd dimensions. + """ bounds_xcenter,bounds_ycenter=bounds.centroid() sheet_rows,sheet_cols = scs.shape @@ -501,14 +512,14 @@ def _createoddslicespec(bounds,scs,min_matrix_radius): @staticmethod def _boundsspec2slicespec(boundsspec,scs): - """ - Convert an iterable boundsspec (supplying l,b,r,t of a + """Convert an iterable boundsspec (supplying l,b,r,t of a BoundingRegion) into a Slice specification. Includes all units whose centers are within the specified sheet-coordinate bounds specified by boundsspec. Exact inverse of _slicespec2boundsspec(). 
+ """ l,b,r,t = boundsspec @@ -525,11 +536,11 @@ def _boundsspec2slicespec(boundsspec,scs): @staticmethod def _slicespec2boundsspec(slicespec,scs): - """ - Convert an iterable slicespec (supplying r1,r2,c1,c2 of a + """Convert an iterable slicespec (supplying r1,r2,c1,c2 of a Slice) into a BoundingRegion specification. Exact inverse of _boundsspec2slicespec(). + """ r1,r2,c1,c2 = slicespec diff --git a/holoviews/core/spaces.py b/holoviews/core/spaces.py index 7391ac97ed..5d99f6b0eb 100644 --- a/holoviews/core/spaces.py +++ b/holoviews/core/spaces.py @@ -21,8 +21,7 @@ class HoloMap(Layoutable, UniformNdMapping, Overlayable): - """ - A HoloMap is an n-dimensional mapping of viewable elements or + """A HoloMap is an n-dimensional mapping of viewable elements or overlays. Each item in a HoloMap has an tuple key defining the values along each of the declared key dimensions, defining the discretely sampled space of values. @@ -30,6 +29,7 @@ class HoloMap(Layoutable, UniformNdMapping, Overlayable): The visual representation of a HoloMap consists of the viewable objects inside the HoloMap which can be explored by varying one or more widgets mapping onto the key dimensions of the HoloMap. + """ data_type = (ViewableElement, NdMapping, Layout) @@ -47,11 +47,14 @@ def overlay(self, dimensions=None, **kwargs): Groups data by supplied dimension(s) overlaying the groups along the dimension(s). - Args: - dimensions: Dimension(s) of dimensions to group by + Parameters + ---------- + dimensions + Dimension(s) of dimensions to group by - Returns: - NdOverlay object(s) with supplied dimensions + Returns + ------- + NdOverlay object(s) with supplied dimensions """ dimensions = self._valid_dimensions(dimensions) if len(dimensions) == self.ndims: @@ -68,12 +71,14 @@ def grid(self, dimensions=None, **kwargs): Groups data by supplied dimension(s) laying the groups along the dimension(s) out in a GridSpace. - Args: - dimensions: Dimension/str or list + Parameters + ---------- + dimensions : Dimension/str or list Dimension or list of dimensions to group by - Returns: - GridSpace with supplied dimensions + Returns + ------- + GridSpace with supplied dimensions """ dimensions = self._valid_dimensions(dimensions) if len(dimensions) == self.ndims: @@ -88,11 +93,14 @@ def layout(self, dimensions=None, **kwargs): Groups data by supplied dimension(s) laying the groups along the dimension(s) out in a NdLayout. - Args: - dimensions: Dimension(s) to group by + Parameters + ---------- + dimensions + Dimension(s) to group by - Returns: - NdLayout with supplied dimensions + Returns + ------- + NdLayout with supplied dimensions """ dimensions = self._valid_dimensions(dimensions) if len(dimensions) == self.ndims: @@ -120,28 +128,35 @@ def options(self, *args, **kwargs): obj.options({'Image': dict(cmap='viridis', show_title=False)}) - Args: - *args: Sets of options to apply to object - Supports a number of formats including lists of Options - objects, a type[.group][.label] followed by a set of - keyword options to apply and a dictionary indexed by - type[.group][.label] specs. 
- backend (optional): Backend to apply options to - Defaults to current selected backend - clone (bool, optional): Whether to clone object - Options can be applied inplace with clone=False - **kwargs: Keywords of options - Set of options to apply to the object - - Returns: - Returns the cloned object with the options applied + Parameters + ---------- + *args + Sets of options to apply to object + Supports a number of formats including lists of Options + objects, a type[.group][.label] followed by a set of + keyword options to apply and a dictionary indexed by + type[.group][.label] specs. + backend : optional + Backend to apply options to + Defaults to current selected backend + clone : bool, optional + Whether to clone object + Options can be applied inplace with clone=False + **kwargs: Keywords of options + Set of options to apply to the object + + Returns + ------- + Returns the cloned object with the options applied """ data = dict([(k, v.options(*args, **kwargs)) for k, v in self.data.items()]) return self.clone(data) def _split_overlays(self): - "Splits overlays inside the HoloMap into list of HoloMaps" + """Splits overlays inside the HoloMap into list of HoloMaps + + """ if not issubclass(self.type, CompositeOverlay): return None, self.clone() @@ -160,18 +175,18 @@ def _split_overlays(self): return keys, maps def _dimension_keys(self): - """ - Helper for __mul__ that returns the list of keys together with + """Helper for __mul__ that returns the list of keys together with the dimension labels. + """ return [tuple(zip([d.name for d in self.kdims], [k] if self.ndims == 1 else k)) for k in self.keys()] def _dynamic_mul(self, dimensions, other, keys): - """ - Implements dynamic version of overlaying operation overlaying + """Implements dynamic version of overlaying operation overlaying DynamicMaps and HoloMaps where the key dimensions of one is a strict superset of the other. + """ # If either is a HoloMap compute Dimension values if not isinstance(self, DynamicMap) or not isinstance(other, DynamicMap): @@ -226,6 +241,7 @@ def __mul__(self, other, reverse=False): UniformNdMapping is mulled with is another UniformNdMapping it will try to match up the dimensions, making sure that items with completely different dimensions aren't overlaid. + """ if isinstance(other, HoloMap): self_set = {d.name for d in self.kdims} @@ -293,7 +309,9 @@ def dynamic_mul(*args, **kwargs): return NotImplemented def __lshift__(self, other): - "Adjoin another object to this one returning an AdjointLayout" + """Adjoin another object to this one returning an AdjointLayout + + """ if isinstance(other, (ViewableElement, UniformNdMapping, Empty)): return AdjointLayout([self, other]) elif isinstance(other, AdjointLayout): @@ -318,13 +336,18 @@ def collate(self, merge_type=None, drop=None, drop_constant=False): merging the outer Dimension into any other UniformNdMapping type. - Args: - merge_type: Type of the object to merge with - drop: List of dimensions to drop - drop_constant: Drop constant dimensions automatically - - Returns: - Collated Layout or HoloMap + Parameters + ---------- + merge_type + Type of the object to merge with + drop + List of dimensions to drop + drop_constant + Drop constant dimensions automatically + + Returns + ------- + Collated Layout or HoloMap """ if drop is None: drop = [] @@ -346,8 +369,9 @@ def decollate(self): positional_stream_args=True, and the callback function accepts stream values as positional dict arguments. 
- Returns: - DynamicMap that returns an HoloMap + Returns + ------- + DynamicMap that returns an HoloMap """ from .decollate import decollate return decollate(self) @@ -357,15 +381,20 @@ def relabel(self, label=None, group=None, depth=1): Applies relabeling to children up to the supplied depth. - Args: - label (str, optional): New label to apply to returned object - group (str, optional): New group to apply to returned object - depth (int, optional): Depth to which relabel will be applied - If applied to container allows applying relabeling to - contained objects up to the specified depth - - Returns: - Returns relabelled object + Parameters + ---------- + label : str, optional + New label to apply to returned object + group : str, optional + New group to apply to returned object + depth : int, optional + Depth to which relabel will be applied + If applied to container allows applying relabeling to + contained objects up to the specified depth + + Returns + ------- + Returns relabelled object """ return super().relabel(label=label, group=group, depth=depth) @@ -376,15 +405,21 @@ def hist(self, dimension=None, num_bins=20, bin_range=None, Defaults to first value dimension if present otherwise falls back to first key dimension. - Args: - dimension: Dimension(s) to compute histogram on - num_bins (int, optional): Number of bins - bin_range (tuple optional): Lower and upper bounds of bins - adjoin (bool, optional): Whether to adjoin histogram - - Returns: - AdjointLayout of HoloMap and histograms or just the - histograms + Parameters + ---------- + dimension + Dimension(s) to compute histogram on + num_bins : int, optional + Number of bins + bin_range : tuple, optional + Lower and upper bounds of bins + adjoin : bool, optional + Whether to adjoin histogram + + Returns + ------- + AdjointLayout of HoloMap and histograms or just the + histograms """ if dimension is not None and not isinstance(dimension, list): dimension = [dimension] @@ -426,8 +461,7 @@ def hist(self, dimension=None, num_bins=20, bin_range=None, class Callable(param.Parameterized): - """ - Callable allows wrapping callbacks on one or more DynamicMaps + """Callable allows wrapping callbacks on one or more DynamicMaps allowing their inputs (and in future outputs) to be defined. This makes it possible to wrap DynamicMaps with streams and makes it possible to traverse the graph of operations applied @@ -456,6 +490,7 @@ class Callable(param.Parameterized): may be a type[.group][.label] specification for Layouts, an integer index or a suitable NdLayout/GridSpace key. For more information see the DynamicMap tutorial at holoviews.org. 
+ """ callable = param.Callable(default=None, constant=True, allow_refs=False, doc=""" @@ -508,7 +543,9 @@ def argspec(self): @property def noargs(self): - "Returns True if the callable takes no arguments" + """Returns True if the callable takes no arguments + + """ noargs = util.ArgSpec(args=[], varargs=None, keywords=None, defaults=None) return self.argspec == noargs @@ -516,12 +553,16 @@ def noargs(self): def clone(self, callable=None, **overrides): """Clones the Callable optionally with new settings - Args: - callable: New callable function to wrap - **overrides: Parameter overrides to apply + Parameters + ---------- + callable + New callable function to wrap + **overrides + Parameter overrides to apply - Returns: - Cloned Callable object + Returns + ------- + Cloned Callable object """ old = {k: v for k, v in self.param.values().items() if k not in ['callable', 'name']} @@ -536,12 +577,16 @@ def __call__(self, *args, **kwargs): If enabled uses memoization to avoid calling function unnecessarily. - Args: - *args: Arguments passed to the callable function - **kwargs: Keyword arguments passed to the callable function + Parameters + ---------- + *args + Arguments passed to the callable function + **kwargs + Keyword arguments passed to the callable function - Returns: - Return value of the wrapped callable function + Returns + ------- + Return value of the wrapped callable function """ # Nothing to do for callbacks that accept no arguments kwarg_hash = kwargs.pop('_memoization_hash_', ()) @@ -599,9 +644,9 @@ def __call__(self, *args, **kwargs): class Generator(Callable): - """ - Generators are considered a special case of Callable that accept no + """Generators are considered a special case of Callable that accept no arguments and never memoize. + """ callable = param.ClassSelector(default=None, class_ = types.GeneratorType, @@ -626,11 +671,14 @@ def __call__(self): def get_nested_dmaps(dmap): """Recurses DynamicMap to find DynamicMaps inputs - Args: - dmap: DynamicMap to recurse to look for DynamicMap inputs + Parameters + ---------- + dmap + DynamicMap to recurse to look for DynamicMap inputs - Returns: - List of DynamicMap instances that were found + Returns + ------- + List of DynamicMap instances that were found """ if not isinstance(dmap, DynamicMap): return [] @@ -643,22 +691,25 @@ def get_nested_dmaps(dmap): def get_nested_streams(dmap): """Recurses supplied DynamicMap to find all streams - Args: - dmap: DynamicMap to recurse to look for streams + Parameters + ---------- + dmap + DynamicMap to recurse to look for streams - Returns: - List of streams that were found + Returns + ------- + List of streams that were found """ return list({s for dmap in get_nested_dmaps(dmap) for s in dmap.streams}) @contextmanager def dynamicmap_memoization(callable_obj, streams): - """ - Determine whether the Callable should have memoization enabled + """Determine whether the Callable should have memoization enabled based on the supplied streams (typically by a DynamicMap). Memoization is disabled if any of the streams require it it and are currently in a triggered state. + """ memoization_state = bool(callable_obj._stream_memoization) callable_obj._stream_memoization &= not any(s.transient and s._triggering for s in streams) @@ -670,12 +721,13 @@ def dynamicmap_memoization(callable_obj, streams): class periodic: - """ - Implements the utility of the same name on DynamicMap. + """Implements the utility of the same name on DynamicMap. 
Used to defined periodic event updates that can be started and stopped. + """ + _periodic_util = util.periodic def __init__(self, dmap): @@ -689,15 +741,20 @@ def __call__(self, period, count=None, param_fn=None, timeout=None, block=True): the event method. Runs count times with the specified period. If count is None, runs indefinitely. - Args: - period: Timeout between events in seconds - count: Number of events to trigger - param_fn: Function returning stream updates given count - Stream parameter values should be returned as dictionary - timeout: Overall timeout in seconds - block: Whether the periodic callbacks should be blocking + Parameters + ---------- + period + Timeout between events in seconds + count + Number of events to trigger + param_fn + Function returning stream updates given count + Stream parameter values should be returned as dictionary + timeout + Overall timeout in seconds + block + Whether the periodic callbacks should be blocking """ - if self.instance is not None and not self.instance.completed: raise RuntimeError('Periodic process already running. ' 'Wait until it completes or call ' @@ -715,7 +772,9 @@ def inner(i): self.instance = instance def stop(self): - "Stop the periodic process." + """Stop the periodic process. + + """ self.instance.stop() def __str__(self): @@ -724,11 +783,11 @@ def __str__(self): class DynamicMap(HoloMap): - """ - A DynamicMap is a type of HoloMap where the elements are dynamically + """A DynamicMap is a type of HoloMap where the elements are dynamically generated by a callable. The callable is invoked with values associated with the key dimensions or with values supplied by stream parameters. + """ # Declare that callback is a positional parameter (used in clone) @@ -833,10 +892,10 @@ def redim(self): @property def unbounded(self): - """ - Returns a list of key dimensions that are unbounded, excluding + """Returns a list of key dimensions that are unbounded, excluding stream parameters. If any of these key dimensions are unbounded, the DynamicMap as a whole is also unbounded. + """ unbounded_dims = [] # Dimensioned streams do not need to be bounded @@ -852,7 +911,9 @@ def unbounded(self): @property def current_key(self): - """Returns the current key value.""" + """Returns the current key value. + + """ return self._current_key def _stream_parameters(self): @@ -861,9 +922,9 @@ def _stream_parameters(self): ) def _initial_key(self): - """ - Construct an initial key for based on the lower range bounds or + """Construct an initial key for based on the lower range bounds or values on the key dimensions. + """ key = [] undefined = [] @@ -892,9 +953,9 @@ def _initial_key(self): def _validate_key(self, key): - """ - Make sure the supplied key values are within the bounds + """Make sure the supplied key values are within the bounds specified by the corresponding dimension range and soft_range. + """ if key == () and len(self.kdims) == 0: return () key = util.wrap_tuple(key) @@ -915,8 +976,10 @@ def event(self, **kwargs): Automatically find streams matching the supplied kwargs to update and trigger events on them. - Args: - **kwargs: Events to update streams with + Parameters + ---------- + **kwargs + Events to update streams with """ if self.callback.noargs and self.streams == []: self.param.warning( @@ -949,7 +1012,9 @@ def event(self, **kwargs): def _style(self, retval): - "Applies custom option tree to values return by the callback." + """Applies custom option tree to values return by the callback. 
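To make the event() semantics documented above concrete, a small hedged sketch (the PointerX stream and the callback are illustrative choices, not taken from the patch):

    import holoviews as hv
    from holoviews.streams import PointerX

    stream = PointerX(x=0)
    dmap = hv.DynamicMap(lambda x: hv.VLine(x or 0), streams=[stream])
    dmap.event(x=1.5)   # updates the matching stream parameter and re-runs the callback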
+ + """ from ..util import opts if self.id not in Store.custom_options(): return retval @@ -958,7 +1023,9 @@ def _style(self, retval): def _execute_callback(self, *args): - "Executes the callback with the appropriate args and kwargs" + """Executes the callback with the appropriate args and kwargs + + """ self._validate_key(args) # Validate input key # Additional validation needed to ensure kwargs don't clash @@ -1003,21 +1070,27 @@ def options(self, *args, **kwargs): obj.options({'Image': dict(cmap='viridis', show_title=False)}) - Args: - *args: Sets of options to apply to object - Supports a number of formats including lists of Options - objects, a type[.group][.label] followed by a set of - keyword options to apply and a dictionary indexed by - type[.group][.label] specs. - backend (optional): Backend to apply options to - Defaults to current selected backend - clone (bool, optional): Whether to clone object - Options can be applied inplace with clone=False - **kwargs: Keywords of options - Set of options to apply to the object - - Returns: - Returns the cloned object with the options applied + Parameters + ---------- + *args + Sets of options to apply to object + Supports a number of formats including lists of Options + objects, a type[.group][.label] followed by a set of + keyword options to apply and a dictionary indexed by + type[.group][.label] specs. + backend : optional + Backend to apply options to + Defaults to current selected backend + clone : bool, optional + Whether to clone object + Options can be applied inplace with clone=False + **kwargs + Keywords of options + Set of options to apply to the object + + Returns + ------- + Returns the cloned object with the options applied """ if 'clone' not in kwargs: kwargs['clone'] = True @@ -1028,18 +1101,26 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): """Clones the object, overriding data and parameters. - Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - link (bool, optional): Whether clone should be linked - Determines whether Streams and Links attached to - original object will be inherited. - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned object + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + link : bool, optional + Whether clone should be linked + Determines whether Streams and Links attached to + original object will be inherited. + *args + Additional arguments to pass to constructor + **overrides + New keyword arguments to pass to constructor + + Returns + ------- + Cloned object """ callback = overrides.pop('callback', self.callback) if data is None and shared_data: @@ -1064,14 +1145,15 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, def reset(self): - "Clear the DynamicMap cache" + """Clear the DynamicMap cache + + """ self.data = {} return self def _cross_product(self, tuple_key, cache, data_slice): - """ - Returns a new DynamicMap if the key (tuple form) expresses a + """Returns a new DynamicMap if the key (tuple form) expresses a cross product, otherwise returns None. The cache argument is a dictionary (key:element pairs) of all the data found in the cache for this key. 
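A brief sketch of the options() call formats listed in the hunk above (the element and option values are invented for illustration):

    import numpy as np
    import holoviews as hv

    img = hv.Image(np.random.rand(10, 10))
    styled = img.options('Image', cmap='viridis')              # spec plus keyword options, returns a clone
    img.options({'Image': dict(cmap='viridis')}, clone=False)  # dict of specs, applied in place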
@@ -1082,6 +1164,7 @@ def _cross_product(self, tuple_key, cache, data_slice): The data_slice may specify slices into each value in the the cross-product. + """ if not any(isinstance(el, (list, set)) for el in tuple_key): return None @@ -1114,10 +1197,10 @@ def _cross_product(self, tuple_key, cache, data_slice): def _slice_bounded(self, tuple_key, data_slice): - """ - Slices bounded DynamicMaps by setting the soft_ranges on + """Slices bounded DynamicMaps by setting the soft_ranges on key dimensions and applies data slice to cached and dynamic values. + """ slices = [el for el in tuple_key if isinstance(el, slice)] if any(el.step for el in slices): @@ -1160,16 +1243,19 @@ def __getitem__(self, key): values per Dimension are defined. Once values are in the cache the DynamicMap can be cast to a HoloMap. - Args: - key: n-dimensional key corresponding to the key dimensions - Scalar values will be evaluated as normal while lists - of values will be combined to form the cross-product, - making it possible to evaluate many keys at once. - - Returns: - Returns evaluated callback return value for scalar key - otherwise returns cloned DynamicMap containing the cross- - product of evaluated items. + Parameters + ---------- + key + n-dimensional key corresponding to the key dimensions + Scalar values will be evaluated as normal while lists + of values will be combined to form the cross-product, + making it possible to evaluate many keys at once. + + Returns + ------- + Returns evaluated callback return value for scalar key + otherwise returns cloned DynamicMap containing the cross- + product of evaluated items. """ self._current_key = key @@ -1245,18 +1331,20 @@ def select(self, selection_specs=None, **kwargs): ds.select(x=[0, 1, 2]) - Args: - selection_specs: List of specs to match on - A list of types, functions, or type[.group][.label] - strings specifying which objects to apply the - selection on. - **selection: Dictionary declaring selections by dimension - Selections can be scalar values, tuple ranges, lists - of discrete values and boolean arrays - - Returns: - Returns an Dimensioned object containing the selected data - or a scalar if a single value was selected + Parameters + ---------- + selection_specs : List of specs to match on + A list of types, functions, or type[.group][.label] + strings specifying which objects to apply the + selection on. + **selection: Dictionary declaring selections by dimension + Selections can be scalar values, tuple ranges, lists + of discrete values and boolean arrays + + Returns + ------- + Returns an Dimensioned object containing the selected data + or a scalar if a single value was selected """ if selection_specs is not None and not isinstance(selection_specs, (list, tuple)): selection_specs = [selection_specs] @@ -1280,8 +1368,8 @@ def dynamic_select(obj, **dynkwargs): def _cache(self, key, val): - """ - Request that a key/value pair be considered for caching. + """Request that a key/value pair be considered for caching. + """ cache_size = (1 if util.dimensionless_contents( self.streams, self.kdims, no_duplicates=not self.positional_stream_args) @@ -1301,16 +1389,18 @@ def map(self, map_fn, specs=None, clone=True, link_inputs=True): dmap.map(fn, hv.Curve) - Args: - map_fn: Function to apply to each object - specs: List of specs to match - List of types, functions or type[.group][.label] specs - to select objects to return, by default applies to all - objects. 
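The indexing and selection behaviour documented above, sketched with a hypothetical bounded DynamicMap:

    import holoviews as hv

    dmap = hv.DynamicMap(lambda x: hv.Curve([(i, i * x) for i in range(10)]),
                         kdims=['x']).redim.range(x=(0, 5))
    dmap[2]           # scalar key: evaluates the callback for x=2
    dmap[[1, 2, 3]]   # list of values: returns a clone holding the evaluated cross-product
    dmap.select(x=3)  # the same selection expressed by dimension name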
- clone: Whether to clone the object or transform inplace - - Returns: - Returns the object after the map_fn has been applied + Parameters + ---------- + map_fn : Function to apply to each object + specs : List of specs to match + List of types, functions or type[.group][.label] specs + to select objects to return, by default applies to all + objects. + clone : Whether to clone the object or transform inplace + + Returns + ------- + Returns the object after the map_fn has been applied """ deep_mapped = super().map(map_fn, specs, clone) if isinstance(deep_mapped, type(self)): @@ -1329,15 +1419,20 @@ def relabel(self, label=None, group=None, depth=1): Applies relabeling to children up to the supplied depth. - Args: - label (str, optional): New label to apply to returned object - group (str, optional): New group to apply to returned object - depth (int, optional): Depth to which relabel will be applied - If applied to container allows applying relabeling to - contained objects up to the specified depth - - Returns: - Returns relabelled object + Parameters + ---------- + label : str, optional + New label to apply to returned object + group : str, optional + New group to apply to returned object + depth : int, optional + Depth to which relabel will be applied + If applied to container allows applying relabeling to + contained objects up to the specified depth + + Returns + ------- + Returns relabelled object """ relabelled = super().relabel(label, group, depth) if depth > 0: @@ -1353,9 +1448,9 @@ def dynamic_relabel(obj, **dynkwargs): return relabelled def _split_overlays(self): - """ - Splits a DynamicMap into its components. Only well defined for + """Splits a DynamicMap into its components. Only well defined for DynamicMap with consistent number and order of layers. + """ if not len(self): raise ValueError('Cannot split DynamicMap before it has been initialized') @@ -1397,8 +1492,9 @@ def decollate(self): positional_stream_args=True, and the callback function accepts stream values as positional dict arguments. - Returns: - DynamicMap that returns a non-dynamic element + Returns + ------- + DynamicMap that returns a non-dynamic element """ from .decollate import decollate return decollate(self) @@ -1411,8 +1507,9 @@ def collate(self): containing DynamicMaps. Assumes that the items in the layout or grid that is returned do not change. - Returns: - Collated container containing DynamicMaps + Returns + ------- + Collated container containing DynamicMaps """ # Initialize if self.last is not None: @@ -1509,16 +1606,18 @@ def groupby(self, dimensions=None, container_type=None, group_type=None, **kwarg returning an object of type container_type (expected to be dictionary-like) containing the groups. - Args: - dimensions: Dimension(s) to group by - container_type: Type to cast group container to - group_type: Type to cast each group to - dynamic: Whether to return a DynamicMap - **kwargs: Keyword arguments to pass to each group - - Returns: - Returns object of supplied container_type containing the - groups. If dynamic=True returns a DynamicMap instead. + Parameters + ---------- + dimensions : Dimension(s) to group by + container_type : Type to cast group container to + group_type : Type to cast each group to + dynamic : Whether to return a DynamicMap + **kwargs: Keyword arguments to pass to each group + + Returns + ------- + Returns object of supplied container_type containing the + groups. If dynamic=True returns a DynamicMap instead. 
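A short sketch of the groupby() behaviour documented above, shown on a HoloMap for concreteness (the keys and elements are made up):

    import holoviews as hv

    hmap = hv.HoloMap({(a, b): hv.Points([(a, b)]) for a in range(2) for b in range(3)},
                      kdims=['a', 'b'])
    grid = hmap.groupby(['a'], container_type=hv.GridSpace, group_type=hv.HoloMap)
    # grid is keyed by 'a'; each group is a HoloMap still keyed by 'b'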
""" if dimensions is None: dimensions = self.kdims @@ -1591,32 +1690,34 @@ def inner_fn(outer_vals, *key, **dynkwargs): def grid(self, dimensions=None, **kwargs): - """ - Groups data by supplied dimension(s) laying the groups along + """Groups data by supplied dimension(s) laying the groups along the dimension(s) out in a GridSpace. - Args: - dimensions: Dimension/str or list + Parameters + ---------- + dimensions : Dimension/str or list Dimension or list of dimensions to group by - Returns: - grid: GridSpace + Returns + ------- + grid : GridSpace GridSpace with supplied dimensions """ return self.groupby(dimensions, container_type=GridSpace, **kwargs) def layout(self, dimensions=None, **kwargs): - """ - Groups data by supplied dimension(s) laying the groups along + """Groups data by supplied dimension(s) laying the groups along the dimension(s) out in a NdLayout. - Args: - dimensions: Dimension/str or list + Parameters + ---------- + dimensions : Dimension/str or list Dimension or list of dimensions to group by - Returns: - layout: NdLayout + Returns + ------- + layout : NdLayout NdLayout with supplied dimensions """ return self.groupby(dimensions, container_type=NdLayout, **kwargs) @@ -1628,11 +1729,13 @@ def overlay(self, dimensions=None, **kwargs): Groups data by supplied dimension(s) overlaying the groups along the dimension(s). - Args: - dimensions: Dimension(s) of dimensions to group by + Parameters + ---------- + dimensions : Dimension(s) of dimensions to group by - Returns: - NdOverlay object(s) with supplied dimensions + Returns + ------- + NdOverlay object(s) with supplied dimensions """ if dimensions is None: dimensions = self.kdims @@ -1652,15 +1755,20 @@ def hist(self, dimension=None, num_bins=20, bin_range=None, Defaults to first value dimension if present otherwise falls back to first key dimension. - Args: - dimension: Dimension(s) to compute histogram on - num_bins (int, optional): Number of bins - bin_range (tuple optional): Lower and upper bounds of bins - adjoin (bool, optional): Whether to adjoin histogram - - Returns: - AdjointLayout of DynamicMap and adjoined histogram if - adjoin=True, otherwise just the histogram + Parameters + ---------- + dimension : Dimension(s) to compute histogram on + num_bins : int, optional + Number of bins + bin_range : tuple, optional + Lower and upper bounds of bins + adjoin : bool, optional + Whether to adjoin histogram + + Returns + ------- + AdjointLayout of DynamicMap and adjoined histogram if + adjoin=True, otherwise just the histogram """ def dynamic_hist(obj, **dynkwargs): if isinstance(obj, (NdOverlay, Overlay)): @@ -1689,12 +1797,14 @@ def reindex(self, kdims=None, force=False): Create a new object with a reordered set of key dimensions. Dropping dimensions is not allowed on a DynamicMap. - Args: - kdims: List of dimensions to reindex the mapping with - force: Not applicable to a DynamicMap + Parameters + ---------- + kdims : List of dimensions to reindex the mapping with + force : Not applicable to a DynamicMap - Returns: - Reindexed DynamicMap + Returns + ------- + Reindexed DynamicMap """ if kdims is None: kdims = [] @@ -1725,13 +1835,13 @@ def __next__(self): class GridSpace(Layoutable, UniformNdMapping): - """ - Grids are distinct from Layouts as they ensure all contained + """Grids are distinct from Layouts as they ensure all contained elements to be of the same type. Unlike Layouts, which have integer keys, Grids usually have floating point keys, which correspond to a grid sampling in some two-dimensional space. 
This two-dimensional space may have to arbitrary dimensions, e.g. for 2D parameter spaces. + """ kdims = param.List(default=[Dimension("X"), Dimension("Y")], bounds=(1,2)) @@ -1743,7 +1853,9 @@ def __init__(self, initial_items=None, kdims=None, **params): def __lshift__(self, other): - "Adjoins another object to the GridSpace" + """Adjoins another object to the GridSpace + + """ if isinstance(other, (ViewableElement, UniformNdMapping)): return AdjointLayout([self, other]) elif isinstance(other, AdjointLayout): @@ -1755,11 +1867,14 @@ def __lshift__(self, other): def _transform_indices(self, key): """Snaps indices into the GridSpace to the closest coordinate. - Args: - key: Tuple index into the GridSpace + Parameters + ---------- + key + Tuple index into the GridSpace - Returns: - Transformed key snapped to closest numeric coordinates + Returns + ------- + Transformed key snapped to closest numeric coordinates """ ndims = self.ndims if all(not (isinstance(el, slice) or callable(el)) for el in key): @@ -1799,10 +1914,13 @@ def _transform_indices(self, key): def keys(self, full_grid=False): """Returns the keys of the GridSpace - Args: - full_grid (bool, optional): Return full cross-product of keys + Parameters + ---------- + full_grid : bool, optional + Return full cross-product of keys - Returns: + Returns + ------- List of keys """ keys = super().keys() @@ -1815,11 +1933,11 @@ def keys(self, full_grid=False): @property def last(self): - """ - The last of a GridSpace is another GridSpace + """The last of a GridSpace is another GridSpace constituted of the last of the individual elements. To access the elements by their X,Y position, either index the position directly or use the items() method. + """ if self.type == HoloMap: last_items = [(k, v.last if isinstance(v, HoloMap) else v) @@ -1830,16 +1948,18 @@ def last(self): def __len__(self): - """ - The maximum depth of all the elements. Matches the semantics + """The maximum depth of all the elements. Matches the semantics of __len__ used by Maps. For the total number of elements, count the full set of keys. + """ return max([(len(v) if hasattr(v, '__len__') else 1) for v in self.values()] + [0]) @property def shape(self): - "Returns the 2D shape of the GridSpace as (rows, cols)." + """Returns the 2D shape of the GridSpace as (rows, cols). + + """ keys = self.keys() if self.ndims == 1: return (len(keys), 1) @@ -1858,24 +1978,24 @@ def decollate(self): positional_stream_args=True, and the callback function accepts stream values as positional dict arguments. - Returns: - DynamicMap that returns a GridSpace + Returns + ------- + DynamicMap that returns a GridSpace """ from .decollate import decollate return decollate(self) class GridMatrix(GridSpace): - """ - GridMatrix is container type for heterogeneous Element types + """GridMatrix is container type for heterogeneous Element types laid out in a grid. Unlike a GridSpace the axes of the Grid must not represent an actual coordinate space, but may be used to plot various dimensions against each other. The GridMatrix is usually constructed using the gridmatrix operation, which will generate a GridMatrix plotting each dimension in an Element against each other. 
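A small usage sketch of the GridSpace container documented above (the grid contents are invented for illustration):

    import holoviews as hv

    grid = hv.GridSpace({(x, y): hv.Points([(x, y)]) for x in range(3) for y in range(3)},
                        kdims=['X', 'Y'])
    grid[1, 2]    # keys snap to the closest numeric coordinate
    grid.shape    # (3, 3) -- rows, cols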
- """ + """ def _item_check(self, dim_vals, data): if not traversal.uniform(NdMapping([(0, self), (1, data)])): diff --git a/holoviews/core/traversal.py b/holoviews/core/traversal.py index 49045975ad..33c33f0ca7 100644 --- a/holoviews/core/traversal.py +++ b/holoviews/core/traversal.py @@ -1,7 +1,7 @@ -""" -Advanced utilities for traversing nesting/hierarchical Dimensioned +"""Advanced utilities for traversing nesting/hierarchical Dimensioned objects either to inspect the structure of their declared dimensions or mutate the matching elements. + """ from collections import defaultdict @@ -18,10 +18,10 @@ def create_ndkey(length, indexes, values): return tuple(key) def uniform(obj): - """ - Finds all common dimension keys in the object including subsets of + """Finds all common dimension keys in the object including subsets of dimensions. If there are is no common subset of dimensions, None is returned. + """ from .spaces import HoloMap dim_groups = obj.traverse(lambda x: tuple(x.kdims), @@ -33,13 +33,13 @@ def uniform(obj): def unique_dimkeys(obj, default_dim='Frame'): - """ - Finds all common dimension keys in the object including subsets of + """Finds all common dimension keys in the object including subsets of dimensions. If there are is no common subset of dimensions, None is returned. Returns the list of dimensions followed by the list of unique keys. + """ from .ndmapping import NdMapping, item_check from .spaces import HoloMap @@ -106,13 +106,13 @@ def bijective(keys): def hierarchical(keys): - """ - Iterates over dimension values in keys, taking two sets + """Iterates over dimension values in keys, taking two sets of dimension values at a time to determine whether two consecutive dimensions have a one-to-many relationship. If they do a mapping between the first and second dimension values is returned. Returns a list of n-1 mappings, between consecutive dimensions. + """ ndims = len(keys[0]) if ndims <= 1: diff --git a/holoviews/core/tree.py b/holoviews/core/tree.py index 3e1ea39146..b8b619064f 100644 --- a/holoviews/core/tree.py +++ b/holoviews/core/tree.py @@ -4,8 +4,7 @@ class AttrTree: - """ - An AttrTree offers convenient, multi-level attribute access for + """An AttrTree offers convenient, multi-level attribute access for collections of objects. AttrTree objects may also be combined together using the update method or merge classmethod. Here is an example of adding a ViewableElement to an AttrTree and accessing it: @@ -14,14 +13,16 @@ class AttrTree: >>> t.Example.Path = 1 >>> t.Example.Path #doctest: +ELLIPSIS 1 + """ + _disabled_prefixes = [] # Underscore attributes that should be _sanitizer = util.sanitize_identifier @classmethod def merge(cls, trees): - """ - Merge a collection of AttrTree objects. + """Merge a collection of AttrTree objects. + """ first = trees[0] for tree in trees: @@ -30,9 +31,9 @@ def merge(cls, trees): def __dir__(self): - """ - The _dir_mode may be set to 'default' or 'user' in which case + """The _dir_mode may be set to 'default' or 'user' in which case only the child nodes added by the user are listed. + """ dict_keys = self.__dict__.keys() if self.__dict__['_dir_mode'] == 'user': @@ -42,13 +43,19 @@ def __dir__(self): def __init__(self, items=None, identifier=None, parent=None, dir_mode='default'): """ - identifier: A string identifier for the current node (if any) - parent: The parent node (if any) - items: Items as (path, value) pairs to construct - (sub)tree down to given leaf values. 
+        Parameters
+        ----------
+        items
+            Items as (path, value) pairs to construct
+            (sub)tree down to given leaf values.
+        identifier
+            A string identifier for the current node (if any)
+        parent
+            The parent node (if any)
 
         Note that the root node does not have a parent and does not
         require an identifier.
+
         """
         self.__dict__['parent'] = parent
         self.__dict__['identifier'] = type(self)._sanitizer(identifier, escape=False)
@@ -75,7 +82,9 @@ def root(self):
 
     @property
     def path(self):
-        "Returns the path up to the root for the current node."
+        """Returns the path up to the root for the current node.
+
+        """
         if self.parent:
             return '.'.join([self.parent.path, str(self.identifier)])
         else:
@@ -84,7 +93,9 @@ def path(self):
 
     @property
     def fixed(self):
-        "If fixed, no new paths can be created via attribute access"
+        """If fixed, no new paths can be created via attribute access
+
+        """
         return self.__dict__['_fixed']
 
     @fixed.setter
@@ -93,9 +104,9 @@ def fixed(self, val):
 
     def update(self, other):
-        """
-        Updated the contents of the current AttrTree with the
+        """Updates the contents of the current AttrTree with the
         contents of a second AttrTree.
+
         """
         if not isinstance(other, AttrTree):
             raise Exception('Can only update with another AttrTree type.')
@@ -110,9 +121,9 @@ def update(self, other):
 
     def set_path(self, path, val):
-        """
-        Set the given value at the supplied path where path is either
+        """Set the given value at the supplied path where path is either
         a tuple of strings or a string in A.B.C format.
+
         """
         path = tuple(path.split('.')) if isinstance(path , str) else tuple(path)
@@ -128,8 +139,8 @@ def set_path(self, path, val):
 
     def filter(self, path_filters):
-        """
-        Filters the loaded AttrTree using the supplied path_filters.
+        """Filters the loaded AttrTree using the supplied path_filters.
+
         """
         if not path_filters:
             return self
@@ -147,8 +158,8 @@ def filter(self, path_filters):
 
     def _propagate(self, path, val):
-        """
-        Propagate the value up to the root node.
+        """Propagate the value up to the root node.
+
         """
         if val == '_DELETE':
             if path in self.data:
@@ -164,11 +175,11 @@ def _propagate(self, path, val):
 
     def __setitem__(self, identifier, val):
-        """
-        Set a value at a child node with given identifier. If at a root
+        """Set a value at a child node with given identifier. If at a root
         node, multi-level path specifications is allowed (i.e. 'A.B.C'
         format or tuple format) in which case the behaviour matches
         that of set_path.
+
         """
         if isinstance(identifier, str) and '.' not in identifier:
             self.__setattr__(identifier, val)
@@ -181,11 +192,11 @@ def __setitem__(self, identifier, val):
 
     def __getitem__(self, identifier):
-        """
-        For a given non-root node, access a child element by identifier.
+        """For a given non-root node, access a child element by identifier.
 
         If the node is a root node, you may also access elements using
         either tuple format or the 'A.B.C' string format.
+
         """
         split_label = (tuple(identifier.split('.'))
                        if isinstance(identifier, str) else tuple(identifier))
@@ -235,9 +246,9 @@ def __setattr__(self, identifier, val):
 
     def __getattr__(self, identifier):
-        """
-        Access a identifier from the AttrTree or generate a new AttrTree
+        """Access an identifier from the AttrTree or generate a new AttrTree
         with the chosen attribute path.
+
         """
         try:
             return super().__getattr__(identifier)
@@ -287,12 +298,16 @@ def __len__(self):
 
     def get(self, identifier, default=None):
         """Get a node of the AttrTree using its path string.
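The path-based access converted above, as a minimal AttrTree sketch (node names are hypothetical):

    from holoviews.core.tree import AttrTree

    tree = AttrTree()
    tree.set_path('Example.Path', 1)          # equivalent to tree.Example.Path = 1
    tree['Example.Path']                      # 1; root nodes accept 'A.B.C' style keys
    tree.get('Missing.Node', default=None)    # falls back to the supplied default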
- Args: - identifier: Path string of the node to return - default: Value to return if no node is found + Parameters + ---------- + identifier + Path string of the node to return + default + Value to return if no node is found - Returns: - The indexed node of the AttrTree + Returns + ------- + The indexed node of the AttrTree """ split_label = (tuple(identifier.split('.')) if isinstance(identifier, str) else tuple(identifier)) @@ -307,29 +322,39 @@ def get(self, identifier, default=None): return path_item def keys(self): - "Keys of nodes in the AttrTree" + """Keys of nodes in the AttrTree + + """ return list(self.data.keys()) def items(self): - "Keys and nodes of the AttrTree" + """Keys and nodes of the AttrTree + + """ return list(self.data.items()) def values(self): - "Nodes of the AttrTree" + """Nodes of the AttrTree + + """ return list(self.data.values()) def pop(self, identifier, default=None): """Pop a node of the AttrTree using its path string. - Args: - identifier: Path string of the node to return - default: Value to return if no node is found + Parameters + ---------- + identifier + Path string of the node to return + default + Value to return if no node is found - Returns: - The node that was removed from the AttrTree + Returns + ------- + The node that was removed from the AttrTree """ if identifier in self.children: item = self[identifier] diff --git a/holoviews/core/util.py b/holoviews/core/util.py index 05daabb7aa..3856644b98 100644 --- a/holoviews/core/util.py +++ b/holoviews/core/util.py @@ -109,7 +109,10 @@ class VersionError(Exception): - "Raised when there is a library version mismatch." + """Raised when there is a library version mismatch. + + """ + def __init__(self, msg, version=None, min_version=None, **kwargs): self.version = version self.min_version = min_version @@ -117,7 +120,9 @@ def __init__(self, msg, version=None, min_version=None, **kwargs): def _no_import_version(name) -> tuple[int, int, int]: - """ Get version number without importing the library """ + """Get version number without importing the library + + """ try: return Version(version(name)).release except PackageNotFoundError: @@ -125,10 +130,10 @@ def _no_import_version(name) -> tuple[int, int, int]: class Config(param.ParameterizedFunction): - """ - Set of boolean configuration values to change HoloViews' global + """Set of boolean configuration values to change HoloViews' global behavior. Typically used to control warnings relating to deprecations or set global parameter such as style 'themes'. + """ future_deprecations = param.Boolean(default=False, doc=""" @@ -176,8 +181,7 @@ def _int_to_bytes(i): class HashableJSON(json.JSONEncoder): - """ - Extends JSONEncoder to generate a hashable string for as many types + """Extends JSONEncoder to generate a hashable string for as many types of object as possible including nested objects and objects that are not normally hashable. The purpose of this class is to generate unique strings that once hashed are suitable for use in memoization @@ -196,7 +200,9 @@ class HashableJSON(json.JSONEncoder): One limitation of this approach is that dictionaries with composite keys (e.g. tuples) are not supported due to the JSON spec. + """ + string_hashable = (dt.datetime,) repr_hashable = () @@ -245,8 +251,7 @@ def default(self, obj): def merge_option_dicts(old_opts, new_opts): - """ - Update the old_opts option dictionary with the options defined in + """Update the old_opts option dictionary with the options defined in new_opts. 
Instead of a shallow update as would be performed by calling old_opts.update(new_opts), this updates the dictionaries of all option types separately. @@ -257,6 +262,7 @@ def merge_option_dicts(old_opts, new_opts): new_opts = {'a': {'y': 'new', 'z': 'new'}, 'b': {'k': 'new'}} this returns a dictionary {'a': {'x': 'old', 'y': 'new', 'z': 'new'}, 'b': {'k': 'new'}} + """ merged = dict(old_opts) @@ -270,9 +276,9 @@ def merge_option_dicts(old_opts, new_opts): def merge_options_to_dict(options): - """ - Given a collection of Option objects or partial option dictionaries, + """Given a collection of Option objects or partial option dictionaries, merge everything to a single dictionary. + """ merged_options = {} for obj in options: @@ -286,11 +292,11 @@ def merge_options_to_dict(options): def deprecated_opts_signature(args, kwargs): - """ - Utility to help with the deprecation of the old .opts method signature + """Utility to help with the deprecation of the old .opts method signature Returns whether opts.apply_groups should be used (as a bool) and the corresponding options. + """ from .options import Options groups = set(Options._option_groups) @@ -319,10 +325,10 @@ def deprecated_opts_signature(args, kwargs): class periodic(Thread): - """ - Run a callback count times with a given period without blocking. + """Run a callback count times with a given period without blocking. If count is None, will run till timeout (which may be forever if None). + """ def __init__(self, period, count, callback, timeout=None, block=False): @@ -388,9 +394,9 @@ def run(self): def deephash(obj): - """ - Given an object, return a hash using HashableJSON. This hash is not + """Given an object, return a hash using HashableJSON. This hash is not architecture, Python version or platform independent. + """ try: return hash(json.dumps(obj, cls=HashableJSON, sort_keys=True)) @@ -399,12 +405,12 @@ def deephash(obj): def tree_attribute(identifier): - """ - Predicate that returns True for custom attributes added to AttrTrees + """Predicate that returns True for custom attributes added to AttrTrees that are not methods, properties or internal attributes. These custom attributes start with a capitalized character when applicable (not applicable to underscore or certain unicode characters) + """ if identifier == '': return True @@ -414,13 +420,13 @@ def tree_attribute(identifier): return identifier[0].isupper() def argspec(callable_obj): - """ - Returns an ArgSpec object for functions, staticmethods, instance + """Returns an ArgSpec object for functions, staticmethods, instance methods, classmethods and partials. Note that the args list for instance and class methods are those as seen by the user. In other words, the first argument which is conventionally called 'self' or 'cls' is omitted in these cases. + """ if (isinstance(callable_obj, type) and issubclass(callable_obj, param.ParameterizedFunction)): @@ -451,8 +457,7 @@ def argspec(callable_obj): def validate_dynamic_argspec(callback, kdims, streams): - """ - Utility used by DynamicMap to ensure the supplied callback has an + """Utility used by DynamicMap to ensure the supplied callback has an appropriate signature. If validation succeeds, returns a list of strings to be zipped with @@ -467,6 +472,7 @@ def validate_dynamic_argspec(callback, kdims, streams): If the callback doesn't use **kwargs, the accepted keywords are validated against the stream parameter names. 
+ """ argspec = callback.argspec name = callback.name @@ -517,8 +523,8 @@ def validate_dynamic_argspec(callback, kdims, streams): def callable_name(callable_obj): - """ - Attempt to return a meaningful name identifying a callable or generator + """Attempt to return a meaningful name identifying a callable or generator + """ try: if (isinstance(callable_obj, type) @@ -542,8 +548,7 @@ def callable_name(callable_obj): def process_ellipses(obj, key, vdim_selection=False): - """ - Helper function to pad a __getitem__ key with the right number of + """Helper function to pad a __getitem__ key with the right number of empty slices (i.e. :) when the key contains an Ellipsis (...). If the vdim_selection flag is true, check if the end of the key @@ -551,6 +556,7 @@ def process_ellipses(obj, key, vdim_selection=False): will not be applied for the value dimensions (i.e. the resulting key will be exactly one longer than the number of kdims). Note: this flag should not be used for composite types. + """ if getattr(getattr(key, 'dtype', None), 'kind', None) == 'b': return key @@ -574,8 +580,8 @@ def process_ellipses(obj, key, vdim_selection=False): def bytes_to_unicode(value): - """ - Safely casts bytestring to unicode + """Safely casts bytestring to unicode + """ if isinstance(value, bytes): return value.decode('utf-8') @@ -583,8 +589,8 @@ def bytes_to_unicode(value): def get_method_owner(method): - """ - Gets the instance that owns the supplied method + """Gets the instance that owns the supplied method + """ if isinstance(method, partial): method = method.func @@ -592,10 +598,10 @@ def get_method_owner(method): def capitalize_unicode_name(s): - """ - Turns a string such as 'capital delta' into the shortened, + """Turns a string such as 'capital delta' into the shortened, capitalized version, in this case simply 'Delta'. Used as a transform in sanitize_identifier. + """ index = s.find('capital') if index == -1: return s @@ -605,8 +611,7 @@ def capitalize_unicode_name(s): class sanitize_identifier_fn(param.ParameterizedFunction): - """ - Sanitizes group/label values for use in AttrTree attribute + """Sanitizes group/label values for use in AttrTree attribute access. Special characters are sanitized using their (lowercase) unicode @@ -618,6 +623,7 @@ class sanitize_identifier_fn(param.ParameterizedFunction): As these names are often very long, this parameterized function allows filtered, substitutions and transforms to help shorten these names appropriately. + """ capitalize = param.Boolean(default=True, doc=""" @@ -673,16 +679,16 @@ class sanitize_identifier_fn(param.ParameterizedFunction): @param.parameterized.bothmethod def add_aliases(self_or_cls, **kwargs): - """ - Conveniently add new aliases as keyword arguments. For instance + """Conveniently add new aliases as keyword arguments. For instance you can add a new alias with add_aliases(short='Longer string') + """ self_or_cls.aliases.update({v:k for k,v in kwargs.items()}) @param.parameterized.bothmethod def remove_aliases(self_or_cls, aliases): - """ - Remove a list of aliases. + """Remove a list of aliases. + """ for k,v in self_or_cls.aliases.items(): if v in aliases: @@ -702,9 +708,9 @@ def allowable(self_or_cls, name, disable_leading_underscore=None): @param.parameterized.bothmethod def prefixed(self, identifier): - """ - Whether or not the identifier will be prefixed. + """Whether or not the identifier will be prefixed. Strings that require the prefix are generally not recommended. 
+ """ invalid_starting = ['Mn', 'Mc', 'Nd', 'Pc'] if identifier.startswith('_'): return True @@ -712,9 +718,10 @@ def prefixed(self, identifier): @param.parameterized.bothmethod def remove_diacritics(self_or_cls, identifier): + """Remove diacritics and accents from the input leaving other + unicode characters alone. + """ - Remove diacritics and accents from the input leaving other - unicode characters alone.""" chars = '' for c in identifier: replacement = unicodedata.normalize('NFKD', c).encode('ASCII', 'ignore') @@ -726,10 +733,10 @@ def remove_diacritics(self_or_cls, identifier): @param.parameterized.bothmethod def shortened_character_name(self_or_cls, c, eliminations=None, substitutions=None, transforms=None): - """ - Given a unicode character c, return the shortened unicode name + """Given a unicode character c, return the shortened unicode name (as a list of tokens) by applying the eliminations, substitutions and transforms. + """ if transforms is None: transforms = [] @@ -771,7 +778,9 @@ def __call__(self, name, escape=True): def _process_underscores(self, tokens): - "Strip underscores to make sure the number is correct after join" + """Strip underscores to make sure the number is correct after join + + """ groups = [[str(''.join(el))] if b else list(el) for (b,el) in itertools.groupby(tokens, lambda k: k=='_')] flattened = [el for group in groups for el in group] @@ -793,7 +802,9 @@ def sanitize_py3(self, name): return name def sanitize(self, name, valid_fn): - "Accumulate blocks of hex and separate blocks by underscores" + """Accumulate blocks of hex and separate blocks by underscores + + """ invalid = {'\a':'a','\b':'b', '\v':'v','\f':'f','\r':'r'} for cc in filter(lambda el: el in name, invalid.keys()): raise Exception(rf"Please use a raw string or escape control code '\{invalid[cc]}'") @@ -822,8 +833,8 @@ def sanitize(self, name, valid_fn): dimension_sanitizer = sanitize_identifier_fn.instance(capitalize=False) def isscalar(val): - """ - Value is scalar or None + """Value is scalar or None + """ return val is None or np.isscalar(val) or isinstance(val, datetime_types) @@ -842,6 +853,7 @@ def isequal(value1, value2): """Compare two values, returning a boolean. Will apply the comparison to all elements of an array/dataframe. + """ try: check = (value1 is value2) or (value1 == value2) @@ -853,9 +865,9 @@ def isequal(value1, value2): def asarray(arraylike, strict=True): - """ - Converts arraylike objects to NumPy ndarray types. Errors if + """Converts arraylike objects to NumPy ndarray types. Errors if object is not arraylike and strict option is enabled. + """ if isinstance(arraylike, np.ndarray): return arraylike @@ -873,8 +885,8 @@ def asarray(arraylike, strict=True): nat_as_integer = np.datetime64('NAT').view('i8') def isnat(val): - """ - Checks if the value is a NaT. Should only be called on datetimelike objects. + """Checks if the value is a NaT. Should only be called on datetimelike objects. + """ if (isinstance(val, (np.datetime64, np.timedelta64)) or (isinstance(val, np.ndarray) and val.dtype.kind == 'M')): @@ -888,9 +900,9 @@ def isnat(val): def isfinite(val): - """ - Helper function to determine if scalar or array value is finite extending + """Helper function to determine if scalar or array value is finite extending np.isfinite with support for None, string, datetime types. 
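A few representative values for the isfinite() helper documented above, summarised from the docstring and offered as a sketch only:

    import numpy as np
    from holoviews.core.util import isfinite

    isfinite(np.nan)                        # False
    isfinite(None)                          # False
    isfinite(np.datetime64('2020-01-01'))   # True -- datetimes count as finite unless NaT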
+ """ is_dask = is_dask_array(val) if not np.isscalar(val) and not is_dask: @@ -926,8 +938,8 @@ def isfinite(val): def isdatetime(value): - """ - Whether the array or scalar is recognized datetime type. + """Whether the array or scalar is recognized datetime type. + """ if isinstance(value, np.ndarray): return (value.dtype.kind == "M" or @@ -938,10 +950,10 @@ def isdatetime(value): def find_minmax(lims, olims): - """ - Takes (a1, a2) and (b1, b2) as input and returns + """Takes (a1, a2) and (b1, b2) as input and returns (np.nanmin(a1, b1), np.nanmax(a2, b2)). Used to calculate min and max values of a number of items. + """ try: limzip = zip(list(lims), list(olims), [np.nanmin, np.nanmax]) @@ -952,10 +964,10 @@ def find_minmax(lims, olims): def find_range(values, soft_range=None): - """ - Safely finds either the numerical min and max of + """Safely finds either the numerical min and max of a set of values, falling back to the first and the last value in the sorted list of values. + """ if soft_range is None: soft_range = [] @@ -978,17 +990,20 @@ def find_range(values, soft_range=None): def max_range(ranges, combined=True): - """ - Computes the maximal lower and upper bounds from a list bounds. + """Computes the maximal lower and upper bounds from a list bounds. - Args: - ranges (list of tuples): A list of range tuples - combined (boolean, optional): Whether to combine bounds - Whether range should be computed on lower and upper bound - independently or both at once + Parameters + ---------- + ranges : list of tuples + A list of range tuples + combined : boolean, optional + Whether to combine bounds + Whether range should be computed on lower and upper bound + independently or both at once - Returns: - The maximum range as a single tuple + Returns + ------- + The maximum range as a single tuple """ try: with warnings.catch_warnings(): @@ -1029,8 +1044,8 @@ def max_range(ranges, combined=True): def range_pad(lower, upper, padding=None, log=False): - """ - Pads the range by a fraction of the interval + """Pads the range by a fraction of the interval + """ if padding is not None and not isinstance(padding, tuple): padding = (padding, padding) @@ -1059,9 +1074,9 @@ def range_pad(lower, upper, padding=None, log=False): def dimension_range(lower, upper, hard_range, soft_range, padding=None, log=False): - """ - Computes the range along a dimension by combining the data range + """Computes the range along a dimension by combining the data range with the Dimension soft_range and range. + """ plower, pupper = range_pad(lower, upper, padding, log) if isfinite(soft_range[0]) and soft_range[0] <= lower: @@ -1079,11 +1094,11 @@ def dimension_range(lower, upper, hard_range, soft_range, padding=None, log=Fals def max_extents(extents, zrange=False): - """ - Computes the maximal extent in 2D and 3D space from + """Computes the maximal extent in 2D and 3D space from list of 4-tuples or 6-tuples. If zrange is enabled all extents are converted to 6-tuples to compute x-, y- and z-limits. + """ if zrange: num = 6 @@ -1119,7 +1134,9 @@ def max_extents(extents, zrange=False): def int_to_alpha(n, upper=True): - "Generates alphanumeric labels of form A-Z, AA-ZZ etc." + """Generates alphanumeric labels of form A-Z, AA-ZZ etc. + + """ casenum = 65 if upper else 97 label = '' count= 0 @@ -1155,9 +1172,9 @@ def int_to_roman(input): def unique_iterator(seq): - """ - Returns an iterator containing all non-duplicate elements + """Returns an iterator containing all non-duplicate elements in the input sequence. 
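A quick sketch of max_range() as documented in the hunk above:

    from holoviews.core.util import max_range

    max_range([(0, 5), (2, 8)])                  # (0, 8)
    max_range([(0, 5), (2, 8)], combined=False)  # lower and upper bounds computed independently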
+ """ seen = set() for item in seq: @@ -1167,28 +1184,30 @@ def unique_iterator(seq): def lzip(*args): - """ - zip function that returns a list. + """Zip function that returns a list. + """ return list(zip(*args)) def unique_zip(*args): - """ - Returns a unique list of zipped values. + """Returns a unique list of zipped values. + """ return list(unique_iterator(zip(*args))) def unique_array(arr): - """ - Returns an array of unique values in the input order. + """Returns an array of unique values in the input order. - Args: - arr (np.ndarray or list): The array to compute unique values on + Parameters + ---------- + arr : np.ndarray or list + The array to compute unique values on - Returns: - A new array of unique values + Returns + ------- + A new array of unique values """ if not len(arr): return np.asarray(arr) @@ -1209,10 +1228,10 @@ def unique_array(arr): def match_spec(element, specification): - """ - Matches the group.label specification of the supplied + """Matches the group.label specification of the supplied element against the supplied specification dictionary returning the value of the best match. + """ match_tuple = () match = specification.get((), {}) @@ -1245,8 +1264,7 @@ def python2sort(x,key=None): def merge_dimensions(dimensions_list): - """ - Merges lists of fully or partially overlapping dimensions by + """Merges lists of fully or partially overlapping dimensions by merging their values. >>> from holoviews import Dimension @@ -1257,6 +1275,7 @@ def merge_dimensions(dimensions_list): [Dimension('A'), Dimension('B')] >>> dimensions[0].values [1, 2, 3, 4] + """ dvalues = defaultdict(list) dimensions = [] @@ -1271,9 +1290,9 @@ def merge_dimensions(dimensions_list): def dimension_sort(odict, kdims, vdims, key_index): - """ - Sorts data by key using usual Python tuple sorting semantics + """Sorts data by key using usual Python tuple sorting semantics or sorts in categorical order for any categorical Dimensions. + """ sortkws = {} ndims = len(kdims) @@ -1306,22 +1325,25 @@ def is_number(obj): def is_float(obj): - """ - Checks if the argument is a floating-point scalar. + """Checks if the argument is a floating-point scalar. + """ return isinstance(obj, (float, np.floating)) def is_int(obj, int_like=False): - """ - Checks for int types including the native Python type and NumPy-like objects + """Checks for int types including the native Python type and NumPy-like objects - Args: - obj: Object to check for integer type - int_like (boolean): Check for float types with integer value + Parameters + ---------- + obj + Object to check for integer type + int_like : boolean + Check for float types with integer value - Returns: - Boolean indicating whether the supplied value is of integer type. + Returns + ------- + Boolean indicating whether the supplied value is of integer type. """ real_int = isinstance(obj, int) or getattr(getattr(obj, 'dtype', None), 'kind', 'o') in 'ui' if real_int or (int_like and hasattr(obj, 'is_integer') and obj.is_integer()): @@ -1330,9 +1352,9 @@ def is_int(obj, int_like=False): class ProgressIndicator(param.Parameterized): - """ - Baseclass for any ProgressIndicator that indicates progress + """Baseclass for any ProgressIndicator that indicates progress as a completion percentage. + """ percent_range = param.NumericTuple(default=(0.0, 100.0), doc=""" @@ -1349,18 +1371,18 @@ def __call__(self, completion): def sort_topologically(graph): - """ - Stackless topological sorting. + """Stackless topological sorting. 
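The uniqueness helpers documented above, sketched on a small list:

    from holoviews.core.util import unique_array, unique_iterator

    list(unique_iterator([3, 1, 3, 2, 1]))   # [3, 1, 2] -- first-seen order is preserved
    unique_array([3, 1, 3, 2, 1])            # array([3, 1, 2])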
graph = { - 3: [1], - 5: [3], - 4: [2], - 6: [4], + 3 : [1], + 5 : [3], + 4 : [2], + 6 : [4], } sort_topologically(graph) [[1, 2], [3, 4], [5, 6]] + """ levels_by_name = {} names_by_level = defaultdict(list) @@ -1402,9 +1424,9 @@ def walk_depth_first(name): def is_cyclic(graph): - """ - Return True if the directed graph g has a cycle. The directed graph + """Return True if the directed graph g has a cycle. The directed graph should be represented as a dictionary mapping of edges for each node. + """ path = set() @@ -1420,18 +1442,18 @@ def visit(vertex): def one_to_one(graph, nodes): - """ - Return True if graph contains only one to one mappings. The + """Return True if graph contains only one to one mappings. The directed graph should be represented as a dictionary mapping of edges for each node. Nodes should be passed a simple list. + """ edges = itertools.chain.from_iterable(graph.values()) return len(graph) == len(nodes) and len(set(edges)) == len(nodes) def get_overlay_spec(o, k, v): - """ - Gets the type.group.label + key spec from an Element in an Overlay. + """Gets the type.group.label + key spec from an Element in an Overlay. + """ k = wrap_tuple(k) return ((type(v).__name__, v.group, v.label, *k) if len(o.kdims) else @@ -1439,9 +1461,9 @@ def get_overlay_spec(o, k, v): def layer_sort(hmap): - """ - Find a global ordering for layers in a HoloMap of CompositeOverlay + """Find a global ordering for layers in a HoloMap of CompositeOverlay types. + """ orderings = {} for o in hmap: @@ -1454,10 +1476,10 @@ def layer_sort(hmap): def layer_groups(ordering, length=2): - """ - Splits a global ordering of Layers into groups based on a slice of + """Splits a global ordering of Layers into groups based on a slice of the spec. The grouping behavior can be modified by changing the length of spec the entries are grouped by. + """ group_orderings = defaultdict(list) for el in ordering: @@ -1466,9 +1488,9 @@ def layer_groups(ordering, length=2): def group_select(selects, length=None, depth=None): - """ - Given a list of key tuples to select, groups them into sensible + """Given a list of key tuples to select, groups them into sensible chunks to avoid duplicating indexing operations. + """ if length is None and depth is None: length = depth = len(selects[0]) @@ -1484,9 +1506,9 @@ def group_select(selects, length=None, depth=None): def iterative_select(obj, dimensions, selects, depth=None): - """ - Takes the output of group_select selecting subgroups iteratively, + """Takes the output of group_select selecting subgroups iteratively, avoiding duplicating select operations. + """ ndims = len(dimensions) depth = depth if depth is not None else ndims @@ -1502,16 +1524,16 @@ def iterative_select(obj, dimensions, selects, depth=None): def get_spec(obj): - """ - Gets the spec from any labeled data object. + """Gets the spec from any labeled data object. + """ return (obj.__class__.__name__, obj.group, obj.label) def is_dataframe(data): - """ - Checks whether the supplied data is of DataFrame type. + """Checks whether the supplied data is of DataFrame type. + """ dd = None if 'dask.dataframe' in sys.modules and 'pandas' in sys.modules: @@ -1521,8 +1543,8 @@ def is_dataframe(data): def is_series(data): - """ - Checks whether the supplied data is of Series type. + """Checks whether the supplied data is of Series type. 
+ """ dd = None if 'dask.dataframe' in sys.modules: @@ -1564,16 +1586,20 @@ def get_param_values(data): def is_param_method(obj, has_deps=False): """Whether the object is a method on a parameterized object. - Args: - obj: Object to check - has_deps (boolean, optional): Check for dependencies - Whether to also check whether the method has been annotated - with param.depends + Parameters + ---------- + obj + Object to check + has_deps : boolean, optional + Check for dependencies + Whether to also check whether the method has been annotated + with param.depends - Returns: - A boolean value indicating whether the object is a method - on a Parameterized object and if enabled whether it has any - dependencies + Returns + ------- + A boolean value indicating whether the object is a method + on a Parameterized object and if enabled whether it has any + dependencies """ parameterized = (inspect.ismethod(obj) and isinstance(get_method_owner(obj), param.Parameterized)) @@ -1589,12 +1615,15 @@ def resolve_dependent_value(value): parameterized functions with dependencies on the supplied value, including such parameters embedded in a list, tuple, dictionary, or slice. - Args: - value: A value which will be resolved + Parameters + ---------- + value + A value which will be resolved - Returns: - A new value where any parameter dependencies have been - resolved. + Returns + ------- + A new value where any parameter dependencies have been + resolved. """ from panel.widgets import RangeSlider @@ -1638,21 +1667,24 @@ def resolve_dependent_kwargs(kwargs): parameterized functions with dependencies in the supplied dictionary. - Args: - kwargs (dict): A dictionary of keyword arguments + Parameters + ---------- + kwargs : dict + A dictionary of keyword arguments - Returns: - A new dictionary where any parameter dependencies have been - resolved. + Returns + ------- + A new dictionary where any parameter dependencies have been + resolved. """ return {k: resolve_dependent_value(v) for k, v in kwargs.items()} @contextmanager def disable_constant(parameterized): - """ - Temporarily set parameters on Parameterized object to + """Temporarily set parameters on Parameterized object to constant=False. + """ params = parameterized.param.objects('existing').values() constants = [p.constant for p in params] @@ -1666,9 +1698,9 @@ def disable_constant(parameterized): def get_ndmapping_label(ndmapping, attr): - """ - Function to get the first non-auxiliary object + """Function to get the first non-auxiliary object label attribute from an NdMapping. + """ label = None els = iter(ndmapping.data.values()) @@ -1687,18 +1719,20 @@ def get_ndmapping_label(ndmapping, attr): def wrap_tuple(unwrapped): - """ Wraps any non-tuple types in a tuple """ + """Wraps any non-tuple types in a tuple + + """ return (unwrapped if isinstance(unwrapped, tuple) else (unwrapped,)) def stream_name_mapping(stream, exclude_params=None, reverse=False): - """ - Return a complete dictionary mapping between stream parameter names + """Return a complete dictionary mapping between stream parameter names to their applicable renames, excluding parameters listed in exclude_params. If reverse is True, the mapping is from the renamed strings to the original stream parameter names. 
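A sketch of wrap_tuple() and is_param_method() from this hunk; the Parameterized class is hypothetical and only serves the example:

    import param
    from holoviews.core.util import is_param_method, wrap_tuple

    wrap_tuple('key')     # ('key',)
    wrap_tuple((1, 2))    # already a tuple, returned unchanged

    class Example(param.Parameterized):   # hypothetical class, purely for illustration
        def method(self):
            return self.name

    is_param_method(Example().method)     # True: bound method of a Parameterized instance
    is_param_method(len)                  # False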
+
     """
     if exclude_params is None:
         exclude_params = ['name']
@@ -1719,13 +1753,13 @@ def stream_name_mapping(stream, exclude_params=None, reverse=False):
     return mapping
 
 def rename_stream_kwargs(stream, kwargs, reverse=False):
-    """
-    Given a stream and a kwargs dictionary of parameter values, map to
+    """Given a stream and a kwargs dictionary of parameter values, map to
     the corresponding dictionary where the keys are substituted with the
     appropriately renamed string.
 
     If reverse, the output will be a dictionary using the original
     parameter names given a dictionary using the renamed equivalents.
+
     """
     mapped_kwargs = {}
     mapping = stream_name_mapping(stream, reverse=reverse)
@@ -1739,12 +1773,12 @@ def rename_stream_kwargs(stream, kwargs, reverse=False):
 
 
 def stream_parameters(streams, no_duplicates=True, exclude=None):
-    """
-    Given a list of streams, return a flat list of parameter name,
+    """Given a list of streams, return a flat list of parameter names,
     excluding those listed in the exclude list.
 
     If no_duplicates is enabled, a KeyError will be raised if there are
     parameter name clashes across the streams.
+
     """
     if exclude is None:
         exclude = ['name', '_memoize_key']
@@ -1780,26 +1814,26 @@ def stream_parameters(streams, no_duplicates=True, exclude=None):
 
 
 def dimensionless_contents(streams, kdims, no_duplicates=True):
-    """
-    Return a list of stream parameters that have not been associated
+    """Return a list of stream parameters that have not been associated
     with any of the key dimensions.
+
     """
     names = stream_parameters(streams, no_duplicates)
     return [name for name in names if name not in kdims]
 
 
 def unbound_dimensions(streams, kdims, no_duplicates=True):
-    """
-    Return a list of dimensions that have not been associated with
+    """Return a list of dimensions that have not been associated with
     any streams.
+
     """
     params = stream_parameters(streams, no_duplicates)
     return [d for d in kdims if d not in params]
 
 
 def wrap_tuple_streams(unwrapped, kdims, streams):
-    """
-    Fills in tuple keys with dimensioned stream values as appropriate.
+    """Fills in tuple keys with dimensioned stream values as appropriate.
+
     """
     param_groups = [(s.contents.keys(), s) for s in streams]
     pairs = [(name,s) for (group, s) in param_groups for name in group]
@@ -1815,8 +1849,8 @@ def wrap_tuple_streams(unwrapped, kdims, streams):
 
 
 def drop_streams(streams, kdims, keys):
-    """
-    Drop any dimensioned streams from the keys and kdims.
+    """Drop any dimensioned streams from the keys and kdims.
+
     """
     stream_params = stream_parameters(streams)
     inds, dims = zip(*[(ind, kdim) for ind, kdim in enumerate(kdims)
@@ -1844,8 +1878,8 @@ def unpack_group(group, getter):
 
 
 def capitalize(string):
-    """
-    Capitalizes the first letter of a string.
+    """Capitalizes the first letter of a string.
+
     """
     if string:
         return string[0].upper() + string[1:]
@@ -1854,10 +1888,10 @@ def capitalize(string):
 
 
 def get_path(item):
-    """
-    Gets a path from an Labelled object or from a tuple of an existing
+    """Gets a path from a Labelled object or from a tuple of an existing
     path and a labelled object. The path strings are sanitized and
     capitalized.
+
     """
     sanitizers = [group_sanitizer, label_sanitizer]
     if isinstance(item, tuple):
@@ -1875,9 +1909,9 @@ def get_path(item):
 
 
 def make_path_unique(path, counts, new):
-    """
-    Given a path, a list of existing paths and counts for each of the
+    """Given a path, a list of existing paths and counts for each of the
     existing paths.
+ """ added = False while any(path == c[:i] for c in counts for i in range(1, len(c)+1)): @@ -1896,9 +1930,9 @@ def make_path_unique(path, counts, new): class ndmapping_groupby(param.ParameterizedFunction): - """ - Apply a groupby operation to an NdMapping, using pandas to improve + """Apply a groupby operation to an NdMapping, using pandas to improve performance (if available). + """ sort = param.Boolean(default=False, doc='Whether to apply a sorted groupby') @@ -1954,11 +1988,11 @@ def groupby_python(self_or_cls, ndmapping, dimensions, container_type, def cartesian_product(arrays, flat=True, copy=False): - """ - Efficient cartesian product of a list of 1D arrays returning the + """Efficient cartesian product of a list of 1D arrays returning the expanded array views for each dimensions. By default arrays are flattened, which may be controlled with the flat flag. The array views can be turned into regular arrays with the copy flag. + """ arrays = np.broadcast_arrays(*np.ix_(*arrays)) if flat: @@ -1967,11 +2001,11 @@ def cartesian_product(arrays, flat=True, copy=False): def cross_index(values, index): - """ - Allows efficiently indexing into a cartesian product without + """Allows efficiently indexing into a cartesian product without expanding it. The values should be defined as a list of iterables making up the cartesian product and a linear index, returning the cross product of the values at the supplied index. + """ lengths = [len(v) for v in values] length = np.prod(lengths) @@ -1987,9 +2021,9 @@ def cross_index(values, index): def arglexsort(arrays): - """ - Returns the indices of the lexicographical sorting + """Returns the indices of the lexicographical sorting order of the supplied arrays. + """ dtypes = ','.join(array.dtype.str for array in arrays) recarray = np.empty(len(arrays[0]), dtype=dtypes) @@ -1999,9 +2033,9 @@ def arglexsort(arrays): def dimensioned_streams(dmap): - """ - Given a DynamicMap return all streams that have any dimensioned + """Given a DynamicMap return all streams that have any dimensioned parameters, i.e. parameters also listed in the key dimensions. + """ dimensioned = [] for stream in dmap.streams: @@ -2012,10 +2046,10 @@ def dimensioned_streams(dmap): def expand_grid_coords(dataset, dim): - """ - Expand the coordinates along a dimension of the gridded + """Expand the coordinates along a dimension of the gridded dataset into an ND-array matching the dimensionality of the dataset. + """ irregular = [d.name for d in dataset.kdims if d is not dim and dataset.interface.irregular(dataset, d)] @@ -2031,16 +2065,16 @@ def expand_grid_coords(dataset, dim): def dt64_to_dt(dt64): - """ - Safely converts NumPy datetime64 to a datetime object. + """Safely converts NumPy datetime64 to a datetime object. + """ ts = (dt64 - np.datetime64('1970-01-01T00:00:00')) / np.timedelta64(1, 's') return dt.datetime(1970,1,1,0,0,0) + dt.timedelta(seconds=ts) def is_nan(x): - """ - Checks whether value is NaN on arbitrary types + """Checks whether value is NaN on arbitrary types + """ try: # Using pd.isna instead of np.isnan as np.isnan(pd.NA) returns pd.NA! @@ -2051,10 +2085,10 @@ def is_nan(x): def bound_range(vals, density, time_unit='us'): - """ - Computes a bounding range and density from a number of samples + """Computes a bounding range and density from a number of samples assumed to be evenly spaced. Density is rounded to machine precision using significant digits reported by sys.float_info.dig. 
+ """ if not len(vals): return(np.nan, np.nan, density, False) @@ -2079,20 +2113,20 @@ def bound_range(vals, density, time_unit='us'): def validate_regular_sampling(values, rtol=10e-6): - """ - Validates regular sampling of a 1D array ensuring that the difference + """Validates regular sampling of a 1D array ensuring that the difference in sampling steps is at most rtol times the smallest sampling step. Returns a boolean indicating whether the sampling is regular. + """ diffs = np.diff(values) return (len(diffs) < 1) or abs(diffs.min()-diffs.max()) < abs(diffs.min()*rtol) def compute_density(start, end, length, time_unit='us'): - """ - Computes a grid density given the edges and number of samples. + """Computes a grid density given the edges and number of samples. Handles datetime grids correctly by computing timedeltas and computing a density for the given time_unit. + """ if isinstance(start, int): start = float(start) if isinstance(end, int): end = float(end) @@ -2107,9 +2141,9 @@ def compute_density(start, end, length, time_unit='us'): def date_range(start, end, length, time_unit='us'): - """ - Computes a date range given a start date, end date and the number + """Computes a date range given a start date, end date and the number of samples. + """ step = (1./compute_density(start, end, length, time_unit)) if isinstance(start, pd.Timestamp): @@ -2119,15 +2153,15 @@ def date_range(start, end, length, time_unit='us'): def parse_datetime(date): - """ - Parses dates specified as string or integer or pandas Timestamp + """Parses dates specified as string or integer or pandas Timestamp + """ return pd.to_datetime(date).to_datetime64() def parse_datetime_selection(sel): - """ - Parses string selection specs as datetimes. + """Parses string selection specs as datetimes. + """ if isinstance(sel, str) or isdatetime(sel): sel = parse_datetime(sel) @@ -2142,8 +2176,8 @@ def parse_datetime_selection(sel): def dt_to_int(value, time_unit='us'): - """ - Converts a datetime type to an integer with the supplied time unit. + """Converts a datetime type to an integer with the supplied time unit. + """ if isinstance(value, pd.Period): value = value.to_timestamp() @@ -2190,11 +2224,13 @@ def cftime_to_timestamp(date, time_unit='us'): calendar. In order to handle these dates correctly a custom bokeh model with support for other calendars would have to be defined. - Args: - date: cftime datetime object (or array) + Parameters + ---------- + date : cftime datetime object (or array) - Returns: - time_unit since 1970-01-01 00:00:00 + Returns + ------- + time_unit since 1970-01-01 00:00:00 """ import cftime if time_unit == 'us': @@ -2206,9 +2242,9 @@ def cftime_to_timestamp(date, time_unit='us'): calendar='standard')*tscale def search_indices(values, source): - """ - Given a set of values returns the indices of each of those values + """Given a set of values returns the indices of each of those values in the source array. + """ try: orig_indices = source.argsort() @@ -2223,10 +2259,10 @@ def search_indices(values, source): def compute_edges(edges): - """ - Computes edges as midpoints of the bin centers. The first and + """Computes edges as midpoints of the bin centers. The first and last boundaries are equidistant from the first and last midpoints respectively. + """ edges = np.asarray(edges) if edges.dtype.kind == 'i': @@ -2237,8 +2273,8 @@ def compute_edges(edges): def mimebundle_to_html(bundle): - """ - Converts a MIME bundle into HTML. + """Converts a MIME bundle into HTML. 
+ """ if isinstance(bundle, tuple): data, metadata = bundle @@ -2252,8 +2288,8 @@ def mimebundle_to_html(bundle): def numpy_scalar_to_python(scalar): - """ - Converts a NumPy scalar to a regular python type. + """Converts a NumPy scalar to a regular python type. + """ scalar_type = type(scalar) if issubclass(scalar_type, np.float64): @@ -2264,9 +2300,9 @@ def numpy_scalar_to_python(scalar): def closest_match(match, specs, depth=0): - """ - Recursively iterates over type, group, label and overlay key, + """Recursively iterates over type, group, label and overlay key, finding the closest matching spec. + """ if len(match) == 0: return None @@ -2299,13 +2335,12 @@ def closest_match(match, specs, depth=0): def cast_array_to_int64(array): - """ - Convert a numpy array to `int64`. Suppress the following warning + """Convert a numpy array to `int64`. Suppress the following warning emitted by Numpy, which as of 12/2021 has been extensively discussed (https://github.com/pandas-dev/pandas/issues/22384) and whose fate (possible revert) has not yet been settled: - FutureWarning: casting datetime64[ns] values to int64 with .astype(...) + FutureWarning : casting datetime64[ns] values to int64 with .astype(...) is deprecated and will raise in a future version. Use .view(...) instead. """ @@ -2319,8 +2354,7 @@ def cast_array_to_int64(array): def flatten(line): - """ - Flatten an arbitrarily nested sequence. + """Flatten an arbitrarily nested sequence. Inspired by: pd.core.common.flatten @@ -2337,7 +2371,6 @@ def flatten(line): ------- flattened : generator """ - for element in line: if any(isinstance(element, tp) for tp in (list, tuple, dict)): yield from flatten(element) @@ -2346,17 +2379,18 @@ def flatten(line): def lazy_isinstance(obj, class_or_tuple): - """ Lazy isinstance check + """Lazy isinstance check Will only import the module of the object if the module of the obj matches the first value of an item in class_or_tuple. lazy_isinstance(obj, 'dask.dataframe:DataFrame') - Will: + Will : 1) check if the first module is dask 2) If it dask, import dask.dataframe 3) Do an isinstance check for dask.dataframe.DataFrame + """ if isinstance(class_or_tuple, str): class_or_tuple = (class_or_tuple,) diff --git a/holoviews/element/__init__.py b/holoviews/element/__init__.py index 70b508307e..cb8c86b0f7 100644 --- a/holoviews/element/__init__.py +++ b/holoviews/element/__init__.py @@ -14,10 +14,10 @@ class ElementConversion(DataConversion): - """ - ElementConversion is a subclass of DataConversion providing + """ElementConversion is a subclass of DataConversion providing concrete methods to convert a Dataset to specific Element types. + """ def bars(self, kdims=None, vdims=None, groupby=None, **kwargs): diff --git a/holoviews/element/annotation.py b/holoviews/element/annotation.py index 3f846fcc59..3da6ded43a 100644 --- a/holoviews/element/annotation.py +++ b/holoviews/element/annotation.py @@ -43,8 +43,7 @@ class VSpans(VectorizedAnnotation): class Annotation(Element2D): - """ - An Annotation is a special type of element that is designed to be + """An Annotation is a special type of element that is designed to be overlaid on top of any arbitrary 2D element. Annotations have neither key nor value dimensions allowing them to be overlaid over any type of data. @@ -55,6 +54,7 @@ class Annotation(Element2D): directions) unless an explicit 'extents' parameter is supplied. The extents of the bottom Annotation in the Overlay is used when multiple Annotations are displayed together. 
+ """ kdims = param.List(default=[Dimension('x'), Dimension('y')], @@ -88,13 +88,18 @@ def __getitem__(self, key): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index == 0: @@ -136,13 +141,18 @@ def __init__(self, x, **params): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index == 0: @@ -154,7 +164,9 @@ def dimension_values(self, dimension, expanded=True, flat=True): class HLine(Annotation): - """Horizontal line annotation at the given position.""" + """Horizontal line annotation at the given position. + + """ group = param.String(default='HLine', constant=True) @@ -171,13 +183,18 @@ def __init__(self, y, **params): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index == 0: @@ -209,12 +226,16 @@ def from_scatter(cls, element, **kwargs): Computes the slope and y-intercept from an element containing x- and y-coordinates. - Args: - element: Element to compute slope from - kwargs: Keyword arguments to pass to the Slope element + Parameters + ---------- + element + Element to compute slope from + kwargs + Keyword arguments to pass to the Slope element - Returns: - Slope element + Returns + ------- + Slope element """ x, y = (element.dimension_values(i) for i in range(2)) par = np.polyfit(x, y, 1, full=True) @@ -243,13 +264,18 @@ def __init__(self, x1=None, x2=None, **params): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. 
- Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index == 0: @@ -261,7 +287,9 @@ def dimension_values(self, dimension, expanded=True, flat=True): class HSpan(Annotation): - """Horizontal span annotation at the given position.""" + """Horizontal span annotation at the given position. + + """ group = param.String(default='HSpan', constant=True) @@ -279,13 +307,18 @@ def __init__(self, y1=None, y2=None, **params): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index == 0: @@ -298,8 +331,7 @@ def dimension_values(self, dimension, expanded=True, flat=True): class Spline(Annotation): - """ - Draw a spline using the given handle coordinates and handle + """Draw a spline using the given handle coordinates and handle codes. The constructor accepts a tuple in format (coords, codes). Follows format of matplotlib spline definitions as used in @@ -311,6 +343,7 @@ class Spline(Annotation): Path.CURVE3 : 3 Path.CURVE4 : 4 Path.CLOSEPLOY: 79 + """ group = param.String(default='Spline', constant=True) @@ -321,15 +354,20 @@ def __init__(self, spline_points, **params): def clone(self, data=None, shared_data=True, new_type=None, *args, **overrides): """Clones the object, overriding data and parameters. - Args: - data: New data replacing the existing data - shared_data (bool, optional): Whether to use existing data - new_type (optional): Type to cast object to - *args: Additional arguments to pass to constructor - **overrides: New keyword arguments to pass to constructor - - Returns: - Cloned Spline + Parameters + ---------- + data + New data replacing the existing data + shared_data : bool, optional + Whether to use existing data + new_type : optional + Type to cast object to + *args: Additional arguments to pass to constructor + **overrides: New keyword arguments to pass to constructor + + Returns + ------- + Cloned Spline """ return Element2D.clone(self, data, shared_data, new_type, *args, **overrides) @@ -337,13 +375,17 @@ def clone(self, data=None, shared_data=True, new_type=None, *args, **overrides): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. 
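
The Spline element documented above takes matplotlib-style path codes; a minimal sketch:

    import holoviews as hv

    points = [(-0.3, -0.3), (0, 0), (0.25, -0.25), (0.3, 0.3)]
    codes = [1, 4, 4, 4]   # MOVETO followed by CURVE4 control/end points
    hv.Spline((points, codes))
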
- Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension : The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index in [0, 1]: @@ -354,10 +396,10 @@ def dimension_values(self, dimension, expanded=True, flat=True): class Arrow(Annotation): - """ - Draw an arrow to the given xy position with optional text at + """Draw an arrow to the given xy position with optional text at distance 'points' away. The direction of the arrow may be specified as well as the arrow head style. + """ x = param.ClassSelector(default=0, class_=(Number, *datetime_types), doc=""" @@ -395,9 +437,9 @@ def __init__(self, x, y, text='', direction='<', **params) def __setstate__(self, d): - """ - Add compatibility for unpickling old Arrow types with different + """Add compatibility for unpickling old Arrow types with different .data format. + """ super().__setstate__(d) if len(self.data) == 5: @@ -407,13 +449,18 @@ def __setstate__(self, d): def dimension_values(self, dimension, expanded=True, flat=True): """Return the values along the requested dimension. - Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ index = self.get_dimension_index(dimension) if index == 0: @@ -426,10 +473,11 @@ def dimension_values(self, dimension, expanded=True, flat=True): class Text(Annotation): - """ - Draw a text annotation at the specified position with custom + """Draw a text annotation at the specified position with custom fontsize, alignment and rotation. + """ + x = param.ClassSelector(default=0, class_=(Number, str, *datetime_types), doc=""" The x-position of the arrow which make be numeric or a timestamp.""") @@ -466,9 +514,9 @@ def __init__(self, x, y, text, fontsize=12, class Div(Element): - """ - The Div element represents a div DOM node in an HTML document defined + """The Div element represents a div DOM node in an HTML document defined as a string containing valid HTML. + """ group = param.String(default='Div', constant=True) @@ -484,11 +532,11 @@ def __init__(self, data, **params): class Labels(Dataset, Element2D): - """ - Labels represents a collection of text labels associated with 2D + """Labels represents a collection of text labels associated with 2D coordinates. Unlike the Text annotation, Labels is a Dataset type which allows drawing vectorized labels from tabular or gridded data. 
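
A hedged sketch combining the annotation elements converted above (Arrow, Text) with the vectorized Labels element; the keyword spellings follow current releases and are an assumption here:

    import holoviews as hv

    labels = hv.Labels([(0, 0, 'origin'), (1, 1, 'peak')], kdims=['x', 'y'], vdims='Label')
    text = hv.Text(0.5, 0.5, 'midpoint', fontsize=10)
    arrow = hv.Arrow(1, 1, 'maximum', '^')
    labels * text * arrow
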
+ """ kdims = param.List(default=[Dimension('x'), Dimension('y')], diff --git a/holoviews/element/chart.py b/holoviews/element/chart.py index 6b5f52b547..ce6ba4b8c9 100644 --- a/holoviews/element/chart.py +++ b/holoviews/element/chart.py @@ -12,8 +12,7 @@ class Chart(Dataset, Element2D): - """ - A Chart is an abstract baseclass for elements representing one or + """A Chart is an abstract baseclass for elements representing one or more independent and dependent variables defining a 1D coordinate system with associated values. The independent variables or key dimensions map onto the x-axis while the dependent variables are @@ -29,6 +28,7 @@ class Chart(Dataset, Element2D): Since a Chart is a subclass of a Dataset it supports the full set of data interfaces but usually each dimension of a chart represents a column stored in a dictionary, array or DataFrame. + """ kdims = param.List(default=[Dimension('x')], bounds=(1,2), doc=""" @@ -58,30 +58,29 @@ def __getitem__(self, index): class Scatter(Selection1DExpr, Chart): - """ - Scatter is a Chart element representing a set of points in a 1D + """Scatter is a Chart element representing a set of points in a 1D coordinate system where the key dimension maps to the points location along the x-axis while the first value dimension represents the location of the point along the y-axis. + """ group = param.String(default='Scatter', constant=True) class Curve(Selection1DExpr, Chart): - """ - Curve is a Chart element representing a line in a 1D coordinate + """Curve is a Chart element representing a line in a 1D coordinate system where the key dimension maps on the line x-coordinate and the first value dimension represents the height of the line along the y-axis. + """ group = param.String(default='Curve', constant=True) class ErrorBars(Selection1DExpr, Chart): - """ - ErrorBars is a Chart element representing error bars in a 1D + """ErrorBars is a Chart element representing error bars in a 1D coordinate system where the key dimension corresponds to the location along the x-axis and the first value dimension corresponds to the location along the y-axis and one or two @@ -93,6 +92,7 @@ class ErrorBars(Selection1DExpr, Chart): positive errors. By default the errors are defined along y-axis. A parameter `horizontal`, when set `True`, will define the errors along the x-axis. + """ group = param.String(default='ErrorBars', constant=True, doc=""" @@ -111,15 +111,20 @@ def range(self, dim, data_range=True, dimension_range=True): Range of the y-dimension includes the symmetric or asymmetric error. - Args: - dimension: The dimension to compute the range on. - data_range (bool): Compute range from data values - dimension_range (bool): Include Dimension ranges - Whether to include Dimension range and soft_range - in range calculation - - Returns: - Tuple containing the lower and upper bound + Parameters + ---------- + dimension + The dimension to compute the range on. 
+ data_range : bool + Compute range from data values + dimension_range : bool + Include Dimension ranges + Whether to include Dimension range and soft_range + in range calculation + + Returns + ------- + Tuple containing the lower and upper bound """ dim_with_err = 0 if self.horizontal else 1 didx = self.get_dimension_index(dim) @@ -140,24 +145,24 @@ def range(self, dim, data_range=True, dimension_range=True): class Spread(ErrorBars): - """ - Spread is a Chart element representing a spread of values or + """Spread is a Chart element representing a spread of values or confidence band in a 1D coordinate system. The key dimension(s) corresponds to the location along the x-axis and the value dimensions define the location along the y-axis as well as the symmetric or asymmetric spread. + """ group = param.String(default='Spread', constant=True) class Bars(Selection1DExpr, Chart): - """ - Bars is a Chart element representing categorical observations + """Bars is a Chart element representing categorical observations using the height of rectangular bars. The key dimensions represent the categorical groupings of the data, but may also be used to stack the bars, while the first value dimension represents the height of each bar. + """ group = param.String(default='Bars', constant=True) @@ -168,12 +173,12 @@ class Bars(Selection1DExpr, Chart): class Histogram(Selection1DExpr, Chart): - """ - Histogram is a Chart element representing a number of bins in a 1D + """Histogram is a Chart element representing a number of bins in a 1D coordinate system. The key dimension represents the binned values, which may be declared as bin edges or bin centers, while the value dimensions usually defines a count, frequency or density associated with each bin. + """ datatype = param.List(default=['grid']) @@ -199,19 +204,21 @@ def __init__(self, data, **params): @property def edges(self): - "Property to access the Histogram edges provided for backward compatibility" + """Property to access the Histogram edges provided for backward compatibility + + """ return self.interface.coords(self, self.kdims[0], edges=True) class Spikes(Selection1DExpr, Chart): - """ - Spikes is a Chart element which represents a number of discrete + """Spikes is a Chart element which represents a number of discrete spikes, events or observations in a 1D coordinate system. The key dimension therefore represents the position of each spike along the x-axis while the first value dimension, if defined, controls the height along the y-axis. It may therefore be used to visualize the distribution of discrete events, representing a rug plot, or to draw the strength some signal. + """ group = param.String(default='Spikes', constant=True) @@ -224,8 +231,7 @@ class Spikes(Selection1DExpr, Chart): class Area(Curve): - """ - Area is a Chart element representing the area under a curve or + """Area is a Chart element representing the area under a curve or between two curves in a 1D coordinate system. The key dimension represents the location of each coordinate along the x-axis, while the value dimension(s) represent the height of the area or the @@ -233,16 +239,17 @@ class Area(Curve): Multiple areas may be stacked by overlaying them an passing them to the stack method. + """ group = param.String(default='Area', constant=True) @classmethod def stack(cls, areas, baseline_name='Baseline'): - """ - Stacks an (Nd)Overlay of Area or Curve Elements by offsetting + """Stacks an (Nd)Overlay of Area or Curve Elements by offsetting their baselines. 
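
A brief sketch of two of the chart elements converted above: ErrorBars with symmetric errors, and a Histogram built from precomputed bin edges and frequencies:

    import numpy as np
    import holoviews as hv

    xs = np.linspace(0, np.pi, 20)
    errorbars = hv.ErrorBars((xs, np.sin(xs), 0.1 + 0.1 * np.random.rand(20)))
    frequencies, edges = np.histogram(np.random.randn(1000), bins=20)
    histogram = hv.Histogram((edges, frequencies))
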
To stack a HoloMap or DynamicMap use the map method. + """ if not len(areas): return areas diff --git a/holoviews/element/chart3d.py b/holoviews/element/chart3d.py index fdce74623c..06efdbcc7f 100644 --- a/holoviews/element/chart3d.py +++ b/holoviews/element/chart3d.py @@ -7,8 +7,7 @@ class Surface(Image, Element3D): - """ - A Surface represents a regularly sampled 2D grid with associated + """A Surface represents a regularly sampled 2D grid with associated values defining the height along the z-axis. The key dimensions of a Surface represent the 2D coordinates along the x- and y-axes while the value dimension declares the height at each grid @@ -17,6 +16,7 @@ class Surface(Image, Element3D): The data of a Surface is usually defined as a 2D array of values and either a bounds tuple defining the extent in the 2D space or explicit x- and y-coordinate arrays. + """ extents = param.Tuple(default=(None, None, None, None, None, None), doc=""" @@ -43,12 +43,12 @@ def _get_selection_expr_for_stream_value(self, **kwargs): class TriSurface(Element3D, Points): - """ - TriSurface represents a set of coordinates in 3D space which + """TriSurface represents a set of coordinates in 3D space which define a surface via a triangulation algorithm (usually Delauney triangulation). They key dimensions of a TriSurface define the position of each point along the x-, y- and z-axes, while value dimensions can provide additional information about each point. + """ group = param.String(default='TriSurface', constant=True) @@ -67,8 +67,7 @@ def __getitem__(self, slc): class Scatter3D(Element3D, Points): - """ - Scatter3D is a 3D element representing the position of a collection + """Scatter3D is a 3D element representing the position of a collection of coordinates in a 3D space. The key dimensions represent the position of each coordinate along the x-, y- and z-axis. @@ -114,6 +113,7 @@ class Scatter3D(Element3D, Points): colorbar=True, marker="circle", ) + """ kdims = param.List(default=[Dimension('x'), @@ -131,11 +131,11 @@ def __getitem__(self, slc): class Path3D(Element3D, Path): - """ - Path3D is a 3D element representing a line through 3D space. The + """Path3D is a 3D element representing a line through 3D space. The key dimensions represent the position of each coordinate along the x-, y- and z-axis while the value dimensions can optionally supply additional information. + """ kdims = param.List(default=[Dimension('x'), diff --git a/holoviews/element/comparison.py b/holoviews/element/comparison.py index 2d753c6d88..79deed9ee6 100644 --- a/holoviews/element/comparison.py +++ b/holoviews/element/comparison.py @@ -1,5 +1,4 @@ -""" -Helper classes for comparing the equality of two HoloViews objects. +"""Helper classes for comparing the equality of two HoloViews objects. These classes are designed to integrate with unittest.TestCase (see the tests directory) while making equality testing easily accessible @@ -16,6 +15,7 @@ methods on all objects as comparison operators only return Booleans and thus would not supply any information regarding *why* two elements are considered different. + """ import contextlib from functools import partial @@ -47,8 +47,7 @@ class ComparisonInterface: - """ - This class is designed to allow equality testing to work + """This class is designed to allow equality testing to work seamlessly with unittest.TestCase as a mix-in by implementing a compatible interface (namely the assertEqual method). 
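
The Area.stack classmethod converted above is typically used on an Overlay of Area elements, as in this sketch:

    import numpy as np
    import holoviews as hv

    xs = np.linspace(0, 2 * np.pi, 50)
    overlay = hv.Overlay([hv.Area((xs, np.sin(xs) + i)) for i in range(3)])
    hv.Area.stack(overlay)   # each Area's baseline is offset by the one below it
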
@@ -56,6 +55,7 @@ class ComparisonInterface: method of the same name when used as a mix-in with TestCase. The contents of the equality_type_funcs dictionary is suitable for use with TestCase.addTypeEqualityFunc. + """ equality_type_funcs = {} @@ -63,8 +63,8 @@ class ComparisonInterface: @classmethod def simple_equality(cls, first, second, msg=None): - """ - Classmethod equivalent to unittest.TestCase method (longMessage = False.) + """Classmethod equivalent to unittest.TestCase method (longMessage = False.) + """ check = first==second if not isinstance(check, bool) and hasattr(check, "all"): @@ -76,8 +76,8 @@ def simple_equality(cls, first, second, msg=None): @classmethod def assertEqual(cls, first, second, msg=None): - """ - Classmethod equivalent to unittest.TestCase method + """Classmethod equivalent to unittest.TestCase method + """ asserter = None if type(first) is type(second) or (is_float(first) and is_float(second)): @@ -97,8 +97,7 @@ def assertEqual(cls, first, second, msg=None): class Comparison(ComparisonInterface): - """ - Class used for comparing two HoloViews objects, including complex + """Class used for comparing two HoloViews objects, including complex composite objects. Comparisons are available as classmethods, the most general being the assertEqual method that is intended to work with any input. @@ -106,6 +105,7 @@ class Comparison(ComparisonInterface): For instance, to test if two Image objects are equal you can use: Comparison.assertEqual(matrix1, matrix2) + """ # someone might prefer to use a different function, e.g. assert_all_close @@ -767,8 +767,8 @@ def compare_cycles(cls, cycle1, cycle2, msg=None): class ComparisonTestCase(Comparison, TestCase): - """ - Class to integrate the Comparison class with unittest.TestCase. + """Class to integrate the Comparison class with unittest.TestCase. + """ def __init__(self, *args, **kwargs): diff --git a/holoviews/element/geom.py b/holoviews/element/geom.py index 52d8af596d..06bf32ed35 100644 --- a/holoviews/element/geom.py +++ b/holoviews/element/geom.py @@ -6,11 +6,11 @@ class Geometry(Dataset, Element2D): - """ - Geometry elements represent a collection of objects drawn in + """Geometry elements represent a collection of objects drawn in a 2D coordinate system. The two key dimensions correspond to the x- and y-coordinates in the 2D space, while the value dimensions may be used to control other visual attributes of the Geometry + """ group = param.String(default='Geometry', constant=True) @@ -27,9 +27,9 @@ class Geometry(Dataset, Element2D): class Points(Selection2DExpr, Geometry): - """ - Points represents a set of coordinates in 2D space, which may + """Points represents a set of coordinates in 2D space, which may optionally be associated with any number of value dimensions. + """ group = param.String(default='Points', constant=True) @@ -38,12 +38,12 @@ class Points(Selection2DExpr, Geometry): class VectorField(Selection2DExpr, Geometry): - """ - A VectorField represents a set of vectors in 2D space with an + """A VectorField represents a set of vectors in 2D space with an associated angle, as well as an optional magnitude and any number of other value dimensions. The angles are assumed to be defined in radians and by default the magnitude is assumed to be normalized to be between 0 and 1. 
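
A sketch of the VectorField data layout described above, with x, y, an angle in radians and an optional magnitude:

    import numpy as np
    import holoviews as hv

    x, y = np.meshgrid(np.linspace(-1, 1, 10), np.linspace(-1, 1, 10))
    angle = np.arctan2(y, x)
    mag = np.hypot(x, y)
    hv.VectorField((x.ravel(), y.ravel(), angle.ravel(), mag.ravel()))
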
+ """ group = param.String(default='VectorField', constant=True) @@ -82,9 +82,10 @@ def from_uv(cls, data, kdims=None, vdims=None, **params): class Segments(SelectionGeomExpr, Geometry): + """Segments represent a collection of lines in 2D space. + """ - Segments represent a collection of lines in 2D space. - """ + group = param.String(default='Segments', constant=True) kdims = param.List(default=[Dimension('x0'), Dimension('y0'), @@ -95,8 +96,8 @@ class Segments(SelectionGeomExpr, Geometry): class Rectangles(SelectionGeomExpr, Geometry): - """ - Rectangles represent a collection of axis-aligned rectangles in 2D space. + """Rectangles represent a collection of axis-aligned rectangles in 2D space. + """ group = param.String(default='Rectangles', constant=True) diff --git a/holoviews/element/graphs.py b/holoviews/element/graphs.py index b25f9c48f9..9086b8574c 100644 --- a/holoviews/element/graphs.py +++ b/holoviews/element/graphs.py @@ -21,9 +21,9 @@ class RedimGraph(Redim): - """ - Extension for the redim utility that allows re-dimensioning + """Extension for the redim utility that allows re-dimensioning Graph objects including their nodes and edgepaths. + """ def __call__(self, specs=None, **dimensions): @@ -37,11 +37,11 @@ def __call__(self, specs=None, **dimensions): class layout_nodes(Operation): - """ - Accepts a Graph and lays out the corresponding nodes with the + """Accepts a Graph and lays out the corresponding nodes with the supplied networkx layout function. If no layout function is supplied uses a simple circular_layout function. Also supports LayoutAlgorithm function provided in datashader layouts. + """ only_nodes = param.Boolean(default=False, doc=""" @@ -85,10 +85,10 @@ def _process(self, element, key=None): class Nodes(Points): - """ - Nodes is a simple Element representing Graph nodes as a set of + """Nodes is a simple Element representing Graph nodes as a set of Points. Unlike regular Points, Nodes must define a third key dimension corresponding to the node index. + """ kdims = param.List(default=[Dimension('x'), Dimension('y'), @@ -98,17 +98,16 @@ class Nodes(Points): class EdgePaths(Path): - """ - EdgePaths is a simple Element representing the paths of edges + """EdgePaths is a simple Element representing the paths of edges connecting nodes in a graph. + """ group = param.String(default='EdgePaths', constant=True) class Graph(Dataset, Element2D): - """ - Graph is high-level Element representing both nodes and edges. + """Graph is high-level Element representing both nodes and edges. A Graph may be defined in an abstract form representing just the abstract edges between nodes and optionally may be made concrete by supplying a Nodes Element defining the concrete @@ -119,6 +118,7 @@ class Graph(Dataset, Element2D): The constructor accepts regular columnar data defining the edges or a tuple of the abstract edges and nodes, or a tuple of the abstract edges, nodes, and edgepaths. + """ group = param.String(default='Graph', constant=True) @@ -241,8 +241,7 @@ def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides) def select(self, selection_expr=None, selection_specs=None, selection_mode='edges', **selection): - """ - Allows selecting data by the slices, sets and scalar values + """Allows selecting data by the slices, sets and scalar values along a particular dimension. The indices should be supplied as keywords mapping between the selected dimension and value. 
Additionally selection_specs (taking the form of a list @@ -253,13 +252,14 @@ def select(self, selection_expr=None, selection_specs=None, selection_mode='edge Selecting by a node dimensions selects all edges and nodes that are connected to the selected nodes. To select only edges between the selected nodes set the selection_mode to 'nodes'. + """ from ..util.transform import dim if selection_expr is not None and not isinstance(selection_expr, dim): raise ValueError("""\ -The first positional argument to the Dataset.select method is expected to be a -holoviews.util.transform.dim expression. Use the selection_specs keyword -argument to specify a selection specification""") + The first positional argument to the Dataset.select method is expected to be a + holoviews.util.transform.dim expression. Use the selection_specs keyword + argument to specify a selection specification""") sel_dims = (*self.dimensions('ranges'), 'selection_mask') selection = {dim: sel for dim, sel in selection.items() if dim in sel_dims} @@ -356,11 +356,10 @@ def dimensions(self, selection='all', label=False): @property def nodes(self): - """ - Computes the node positions the first time they are requested + """Computes the node positions the first time they are requested if no explicit node information was supplied. - """ + """ if self._nodes is None: from ..operation.element import chain self._nodes = layout_nodes(self, only_nodes=True) @@ -370,9 +369,9 @@ def nodes(self): @property def edgepaths(self): - """ - Returns the fixed EdgePaths or computes direct connections + """Returns the fixed EdgePaths or computes direct connections between supplied nodes. + """ if self._edgepaths: return self._edgepaths @@ -381,8 +380,7 @@ def edgepaths(self): @classmethod def from_networkx(cls, G, positions, nodes=None, **kwargs): - """ - Generate a HoloViews Graph from a networkx.Graph object and + """Generate a HoloViews Graph from a networkx.Graph object and networkx layout function or dictionary of node positions. Any keyword arguments will be passed to the layout function. By default it will extract all node and edge @@ -390,16 +388,21 @@ def from_networkx(cls, G, positions, nodes=None, **kwargs): information may also be supplied. Any non-scalar attributes, such as lists or dictionaries will be ignored. - Args: - G (networkx.Graph): Graph to convert to Graph element - positions (dict or callable): Node positions - Node positions defined as a dictionary mapping from - node id to (x, y) tuple or networkx layout function - which computes a positions dictionary - kwargs (dict): Keyword arguments for layout function - - Returns: - Graph element + Parameters + ---------- + G : networkx.Graph + Graph to convert to Graph element + positions : dict or callable + Node positions + Node positions defined as a dictionary mapping from + node id to (x, y) tuple or networkx layout function + which computes a positions dictionary + kwargs : dict + Keyword arguments for layout function + + Returns + ------- + Graph element """ if not isinstance(positions, dict): positions = positions(G, **kwargs) @@ -475,8 +478,7 @@ def from_networkx(cls, G, positions, nodes=None, **kwargs): class TriMesh(Graph): - """ - A TriMesh represents a mesh of triangles represented as the + """A TriMesh represents a mesh of triangles represented as the simplices and nodes. The simplices represent a indices into the nodes array. 
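
Graph.from_networkx, whose docstring is converted above, is typically used like this (assuming networkx is installed):

    import networkx as nx
    import holoviews as hv

    G = nx.karate_club_graph()
    graph = hv.Graph.from_networkx(G, nx.spring_layout)
    graph.nodes       # node positions, computed lazily when not supplied
    graph.edgepaths   # straight connections between the node positions
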
The mesh therefore follows a datastructure very similar to a graph, with the abstract connectivity between nodes @@ -486,6 +488,7 @@ class TriMesh(Graph): Unlike a Graph each simplex is represented as the node indices of the three corners of each triangle. + """ kdims = param.List(default=['node1', 'node2', 'node3'], @@ -541,9 +544,9 @@ def __init__(self, data, kdims=None, vdims=None, **params): @classmethod def from_vertices(cls, data): - """ - Uses Delauney triangulation to compute triangle simplices for + """Uses Delauney triangulation to compute triangle simplices for each point. + """ try: from scipy.spatial import Delaunay @@ -558,8 +561,8 @@ def from_vertices(cls, data): return cls((tris.simplices, data)) def _initialize_edgepaths(self): - """ - Returns the EdgePaths by generating a triangle for each simplex. + """Returns the EdgePaths by generating a triangle for each simplex. + """ if self._edgepaths: return self._edgepaths @@ -582,20 +585,20 @@ def _initialize_edgepaths(self): @property def edgepaths(self): - """ - Returns the EdgePaths by generating a triangle for each simplex. + """Returns the EdgePaths by generating a triangle for each simplex. + """ return self._initialize_edgepaths() def select(self, selection_specs=None, **selection): - """ - Allows selecting data by the slices, sets and scalar values + """Allows selecting data by the slices, sets and scalar values along a particular dimension. The indices should be supplied as keywords mapping between the selected dimension and value. Additionally selection_specs (taking the form of a list of type.group.label strings, types or functions) may be supplied, which will ensure the selection is only applied if the specs match the selected object. + """ self._initialize_edgepaths() return super().select(selection_specs=None, @@ -605,8 +608,7 @@ def select(self, selection_specs=None, **selection): class layout_chords(Operation): - """ - layout_chords computes the locations of each node on a circle and + """layout_chords computes the locations of each node on a circle and the chords connecting them. The amount of radial angle devoted to each node and the number of chords are scaled by the value dimension of the Chord element. If the values are integers then @@ -620,6 +622,7 @@ class layout_chords(Operation): source to the target node in the graph, the number of samples to interpolate the spline with is given by the chord_samples parameter. + """ chord_samples = param.Integer(default=50, bounds=(0, None), doc=""" @@ -735,8 +738,7 @@ def _process(self, element, key=None): class Chord(Graph): - """ - Chord is a special type of Graph which computes the locations of + """Chord is a special type of Graph which computes the locations of each node on a circle and the chords connecting them. The amount of radial angle devoted to each node and the number of chords are scaled by a weight supplied as a value dimension. @@ -746,6 +748,7 @@ class Chord(Graph): chords are apportioned such that the lowest value edge is given one chord and all other nodes are given nodes proportional to their weight. + """ group = param.String(default='Chord', constant=True) diff --git a/holoviews/element/path.py b/holoviews/element/path.py index 6838da9ebe..fd6426139b 100644 --- a/holoviews/element/path.py +++ b/holoviews/element/path.py @@ -1,8 +1,8 @@ -""" -The path module provides a set of elements to draw paths and polygon +"""The path module provides a set of elements to draw paths and polygon geometries in 2D space. 
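
Hedged sketches of the TriMesh.from_vertices and Chord constructors converted above (from_vertices requires scipy for the Delaunay triangulation):

    import numpy as np
    import holoviews as hv

    trimesh = hv.TriMesh.from_vertices(np.random.rand(30, 2))   # Delaunay simplices over the points
    chord = hv.Chord([(0, 1, 5), (1, 2, 8), (2, 0, 3)])         # chord widths follow the value column
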
In addition to three general elements are Path, Contours and Polygons, it defines a number of elements to quickly draw common shapes. + """ import numpy as np @@ -16,8 +16,7 @@ class Path(SelectionPolyExpr, Geometry): - """ - The Path element represents one or more of path geometries with + """The Path element represents one or more of path geometries with associated values. Each path geometry may be split into sub-geometries on NaN-values and may be associated with scalar values or array values varying along its length. In analogy to @@ -51,6 +50,7 @@ class Path(SelectionPolyExpr, Geometry): the `Path.split` method, which returns each path geometry as a separate entity, while the other methods assume a flattened representation where all paths are separated by NaN values. + """ group = param.String(default="Path", constant=True) @@ -127,20 +127,22 @@ def select(self, selection_expr=None, selection_specs=None, **selection): from holoviews import dim ds.select(selection_expr=dim('x') % 2 == 0) - Args: - selection_expr: holoviews.dim predicate expression - specifying selection. - selection_specs: List of specs to match on - A list of types, functions, or type[.group][.label] - strings specifying which objects to apply the - selection on. - **selection: Dictionary declaring selections by dimension - Selections can be scalar values, tuple ranges, lists - of discrete values and boolean arrays - - Returns: - Returns an Dimensioned object containing the selected data - or a scalar if a single value was selected + Parameters + ---------- + selection_expr : holoviews.dim predicate expression + specifying selection. + selection_specs : List of specs to match on + A list of types, functions, or type[.group][.label] + strings specifying which objects to apply the + selection on. + **selection: Dictionary declaring selections by dimension + Selections can be scalar values, tuple ranges, lists + of discrete values and boolean arrays + + Returns + ------- + Returns an Dimensioned object containing the selected data + or a scalar if a single value was selected """ xdim, ydim = self.kdims[:2] x_range = selection.pop(xdim.name, None) @@ -154,10 +156,10 @@ def select(self, selection_expr=None, selection_specs=None, **selection): return sel[x_range, y_range] def split(self, start=None, end=None, datatype=None, **kwargs): - """ - The split method allows splitting a Path type into a list of + """The split method allows splitting a Path type into a list of subpaths of the same type. A start and/or end may be supplied to select a subset of paths. + """ if not self.interface.multi: if not len(self): @@ -177,8 +179,7 @@ def split(self, start=None, end=None, datatype=None, **kwargs): class Contours(Path): - """ - The Contours element is a subtype of a Path which is characterized + """The Contours element is a subtype of a Path which is characterized by the fact that each path geometry may only be associated with scalar values. It supports all the same data formats as a `Path` but does not allow continuously varying values along the path @@ -206,6 +207,7 @@ class Contours(Path): the `Contours.split` method, which returns each path geometry as a separate entity, while the other methods assume a flattened representation where all paths are separated by NaN values. 
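
A sketch of the Path container and its split method described above, where each sub-geometry becomes its own element:

    import holoviews as hv

    path = hv.Path([{'x': [0, 1, 2], 'y': [0, 1, 0]},
                    {'x': [0, 1, 2], 'y': [1, 0, 1]}])
    path.split()   # list with one Path per sub-geometry
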
+ """ vdims = param.List(default=[], constant=True, doc=""" @@ -222,8 +224,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): class Polygons(Contours): - """ - The Polygons element represents one or more polygon geometries + """The Polygons element represents one or more polygon geometries with associated scalar values. Each polygon geometry may be split into sub-geometries on NaN-values and may be associated with scalar values. In analogy to GEOS geometry types a Polygons @@ -265,6 +266,7 @@ class Polygons(Contours): the `Polygons.split` method, which returns each path geometry as a separate entity, while the other methods assume a flattened representation where all paths are separated by NaN values. + """ group = param.String(default="Polygons", constant=True) @@ -280,30 +282,30 @@ class Polygons(Contours): @property def has_holes(self): - """ - Detects whether any polygon in the Polygons element defines + """Detects whether any polygon in the Polygons element defines holes. Useful to avoid expanding Polygons unless necessary. + """ return self.interface.has_holes(self) def holes(self): - """ - Returns a list-of-lists-of-lists of hole arrays. The three levels + """Returns a list-of-lists-of-lists of hole arrays. The three levels of nesting reflects the structure of the polygons: 1. The first level of nesting corresponds to the list of geometries 2. The second level corresponds to each Polygon in a MultiPolygon 3. The third level of nesting allows for multiple holes per Polygon + """ return self.interface.holes(self) class BaseShape(Path): - """ - A BaseShape is a Path that can be succinctly expressed by a small + """A BaseShape is a Path that can be succinctly expressed by a small number of parameters instead of a full path specification. For instance, a circle may be expressed by the center position and radius instead of an explicit list of path coordinates. + """ __abstract = True @@ -316,9 +318,9 @@ def __init__(self, **params): self.interface = MultiInterface def clone(self, *args, **overrides): - """ - Returns a clone of the object with matching parameter values + """Returns a clone of the object with matching parameter values containing the specified args and kwargs. + """ link = overrides.pop('link', True) settings = dict(self.param.values(), **overrides) @@ -335,9 +337,9 @@ def clone(self, *args, **overrides): class Box(BaseShape): - """ - Draw a centered box of a given width at the given position with + """Draw a centered box of a given width at the given position with the specified aspect ratio (if any). + """ x = param.Number(default=0, doc="The x-position of the box center.") @@ -385,8 +387,7 @@ def __init__(self, x, y, spec, **params): class Ellipse(BaseShape): - """ - Draw an axis-aligned ellipse at the specified x,y position with + """Draw an axis-aligned ellipse at the specified x,y position with the given orientation. The simplest (default) Ellipse is a circle, specified using: @@ -405,7 +406,9 @@ class Ellipse(BaseShape): Note that as a subclass of Path, internally an Ellipse is a sequence of (x,y) sample positions. Ellipse could also be implemented as an annotation that uses a dedicated ellipse artist. 
+ """ + x = param.Number(default=0, doc="The x-position of the ellipse center.") y = param.Number(default=0, doc="The y-position of the ellipse center.") @@ -455,13 +458,13 @@ def __init__(self, x, y, spec, **params): class Bounds(BaseShape): - """ - An arbitrary axis-aligned bounding rectangle defined by the (left, + """An arbitrary axis-aligned bounding rectangle defined by the (left, bottom, right, top) coordinate positions. If supplied a single real number as input, this value will be treated as the radius of a square, zero-center box which will be used to compute the corresponding lbrt tuple. + """ lbrt = param.Tuple(default=(-0.5, -0.5, 0.5, 0.5), doc=""" diff --git a/holoviews/element/raster.py b/holoviews/element/raster.py index 42b3ffb1cd..5e83fcae41 100644 --- a/holoviews/element/raster.py +++ b/holoviews/element/raster.py @@ -19,8 +19,7 @@ class Raster(Element2D): - """ - Raster is a basic 2D element type for presenting either numpy or + """Raster is a basic 2D element type for presenting either numpy or dask arrays as two dimensional raster images. Arrays with a shape of (N,M) are valid inputs for Raster whereas @@ -30,6 +29,7 @@ class Raster(Element2D): Raster does not support slicing like the Image or RGB subclasses and the extents are in matrix coordinates if not explicitly specified. + """ kdims = param.List(default=[Dimension('x'), Dimension('y')], @@ -84,8 +84,8 @@ def range(self, dim, data_range=True, dimension_range=True): return super().range(dim, data_range, dimension_range) def dimension_values(self, dim, expanded=True, flat=True): - """ - The set of samples available along a particular dimension. + """The set of samples available along a particular dimension. + """ dim_idx = self.get_dimension_index(dim) if not expanded and dim_idx == 0: @@ -102,13 +102,13 @@ def dimension_values(self, dim, expanded=True, flat=True): return super().dimension_values(dim) def sample(self, samples=None, bounds=None, **sample_values): - """ - Sample the Raster along one or both of its dimensions, + """Sample the Raster along one or both of its dimensions, returning a reduced dimensionality type, which is either a ItemTable, Curve or Scatter. If two dimension samples and a new_xaxis is provided the sample will be the value of the sampled unit indexed by the value in the new_xaxis tuple. + """ if samples is None: samples = [] @@ -155,11 +155,11 @@ def sample(self, samples=None, bounds=None, **sample_values): def reduce(self, dimensions=None, function=None, **reduce_map): - """ - Reduces the Raster using functions provided via the + """Reduces the Raster using functions provided via the kwargs, where the keyword is the dimension to be reduced. Optionally a label_prefix can be provided to prepend to the result Element label. + """ function, dims = self._reduce_map(dimensions, function, reduce_map) if len(dims) == self.ndims: @@ -200,8 +200,7 @@ def __len__(self): class Image(Selection2DExpr, Dataset, Raster, SheetCoordinateSystem): - """ - Image represents a regularly sampled 2D grid of an underlying + """Image represents a regularly sampled 2D grid of an underlying continuous space of intensity values, which will be colormapped on plotting. The grid of intensity values may be specified as a NxM sized array of values along with a bounds, but it may also be @@ -225,6 +224,7 @@ class Image(Selection2DExpr, Dataset, Raster, SheetCoordinateSystem): Note that the interpretation of the orientation of the array changes depending on whether bounds or explicit coordinates are used. 
+ """ bounds = param.ClassSelector(class_=BoundingRegion, default=BoundingBox(), doc=""" @@ -386,13 +386,13 @@ def _validate(self, data_bounds, supplied_bounds): def clone(self, data=None, shared_data=True, new_type=None, link=True, *args, **overrides): - """ - Returns a clone of the object with matching parameter values + """Returns a clone of the object with matching parameter values containing the specified args and kwargs. If shared_data is set to True and no data explicitly supplied, the clone will share data with the original. May also supply a new_type, which will inherit all shared parameters. + """ if data is None and (new_type is None or issubclass(new_type, Image)): sheet_params = dict(bounds=self.bounds, xdensity=self.xdensity, @@ -406,14 +406,14 @@ def aggregate(self, dimensions=None, function=None, spreadfn=None, **kwargs): return Curve(agg) if isinstance(agg, Dataset) and len(self.vdims) == 1 else agg def select(self, selection_specs=None, **selection): - """ - Allows selecting data by the slices, sets and scalar values + """Allows selecting data by the slices, sets and scalar values along a particular dimension. The indices should be supplied as keywords mapping between the selected dimension and value. Additionally selection_specs (taking the form of a list of type.group.label strings, types or functions) may be supplied, which will ensure the selection is only applied if the specs match the selected object. + """ if selection_specs and not any(self.matches(sp) for sp in selection_specs): return self @@ -456,11 +456,11 @@ def select(self, selection_specs=None, **selection): def closest(self, coords=None, **kwargs): - """ - Given a single coordinate or multiple coordinates as + """Given a single coordinate or multiple coordinates as a tuple or list of tuples or keyword arguments matching the dimension closest will find the closest actual x/y coordinates. + """ if coords is None: coords = [] @@ -508,8 +508,7 @@ def _coord2matrix(self, coord): class ImageStack(Image): - """ - ImageStack expands the capabilities of Image to by supporting + """ImageStack expands the capabilities of Image to by supporting multiple layers of images. As there is many ways to represent multiple layers of images, @@ -529,6 +528,7 @@ class ImageStack(Image): If no vdims are supplied, and the naming can be inferred like with a dictionary the levels will be named level_0, level_1, etc. + """ vdims = param.List(doc=""" @@ -574,8 +574,7 @@ def __init__(self, data, kdims=None, vdims=None, **params): class RGB(Image): - """ - RGB represents a regularly spaced 2D grid of an underlying + """RGB represents a regularly spaced 2D grid of an underlying continuous space of RGB(A) (red, green, blue and alpha) color space values. The definition of the grid closely matches the semantics of an Image and in the simplest case the grid may be @@ -599,6 +598,7 @@ class RGB(Image): Note that the interpretation of the orientation changes depending on whether bounds or explicit coordinates are used. + """ group = param.String(default='RGB', constant=True) @@ -621,12 +621,12 @@ class RGB(Image): @property def rgb(self): - """ - Returns the corresponding RGB element. + """Returns the corresponding RGB element. Other than the updating parameter definitions, this is the only change needed to implemented an arbitrary colorspace as a subclass of RGB. 
+ """ return self @@ -634,17 +634,25 @@ def rgb(self): def load_image(cls, filename, height=1, array=False, bounds=None, bare=False, **kwargs): """Load an image from a file and return an RGB element or array - Args: - filename: Filename of the image to be loaded - height: Determines the bounds of the image where the width - is scaled relative to the aspect ratio of the image. - array: Whether to return an array (rather than RGB default) - bounds: Bounds for the returned RGB (overrides height) - bare: Whether to hide the axes - kwargs: Additional kwargs to the RGB constructor - - Returns: - RGB element or array + Parameters + ---------- + filename + Filename of the image to be loaded + height + Determines the bounds of the image where the width + is scaled relative to the aspect ratio of the image. + array + Whether to return an array (rather than RGB default) + bounds + Bounds for the returned RGB (overrides height) + bare + Whether to hide the axes + kwargs + Additional kwargs to the RGB constructor + + Returns + ------- + RGB element or array """ try: from PIL import Image @@ -716,8 +724,7 @@ def _has_alpha_dimension(self, data, vdims) -> bool: class HSV(RGB): - """ - HSV represents a regularly spaced 2D grid of an underlying + """HSV represents a regularly spaced 2D grid of an underlying continuous space of HSV (hue, saturation and value) color space values. The definition of the grid closely matches the semantics of an Image or RGB element and in the simplest case the grid may @@ -741,6 +748,7 @@ class HSV(RGB): Note that the interpretation of the orientation changes depending on whether bounds or explicit coordinates are used. + """ group = param.String(default='HSV', constant=True) @@ -763,8 +771,8 @@ class HSV(RGB): @property def rgb(self): - """ - Conversion from HSV to RGB. + """Conversion from HSV to RGB. + """ coords = tuple(self.dimension_values(d, expanded=False) for d in self.kdims) @@ -783,8 +791,7 @@ def rgb(self): class QuadMesh(Selection2DExpr, Dataset, Element2D): - """ - A QuadMesh represents 2D rectangular grid expressed as x- and + """A QuadMesh represents 2D rectangular grid expressed as x- and y-coordinates defined as 1D or 2D arrays. Unlike the Image type a QuadMesh may be regularly or irregularly spaced and contain either bin edges or bin centers. If bin edges are supplied the @@ -803,6 +810,7 @@ class QuadMesh(Selection2DExpr, Dataset, Element2D): The grid orientation follows the standard matrix convention: An array Z with shape (nrows, ncolumns) is plotted with the column number as X and the row number as Y. + """ group = param.String(default="QuadMesh", constant=True) @@ -827,8 +835,8 @@ def __init__(self, data, kdims=None, vdims=None, **params): ) def trimesh(self): - """ - Converts a QuadMesh into a TriMesh. + """Converts a QuadMesh into a TriMesh. + """ # Generate vertices xs = self.interface.coords(self, 0, edges=True) @@ -874,8 +882,7 @@ def trimesh(self): class HeatMap(Selection2DExpr, Dataset, Element2D): - """ - HeatMap represents a 2D grid of categorical coordinates which can + """HeatMap represents a 2D grid of categorical coordinates which can be computed from a sparse tabular representation. 
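A hypothetical call matching the load_image signature documented above; 'photo.png' is a placeholder filename and PIL must be installed:

    import holoviews as hv

    rgb = hv.RGB.load_image('photo.png', height=1)     # RGB element, width scaled by aspect ratio
    arr = hv.RGB.load_image('photo.png', array=True)   # raw array instead of an RGB element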
A HeatMap does not automatically aggregate the supplied values, so if the data contains multiple entries for the same coordinate on the 2D grid @@ -890,6 +897,7 @@ class HeatMap(Selection2DExpr, Dataset, Element2D): However any tabular and gridded format, including pandas DataFrames, dictionaries of columns, xarray DataArrays and more are supported if the library is importable. + """ group = param.String(default='HeatMap', constant=True) @@ -911,23 +919,28 @@ def gridded(self): @property def _unique(self): - """ - Reports if the Dataset is unique. + """Reports if the Dataset is unique. + """ return self.gridded.label != 'non-unique' def range(self, dim, data_range=True, dimension_range=True): """Return the lower and upper bounds of values along dimension. - Args: - dimension: The dimension to compute the range on. - data_range (bool): Compute range from data values - dimension_range (bool): Include Dimension ranges - Whether to include Dimension range and soft_range - in range calculation - - Returns: - Tuple containing the lower and upper bound + Parameters + ---------- + dimension + The dimension to compute the range on. + data_range : bool + Compute range from data values + dimension_range : bool + Include Dimension ranges + Whether to include Dimension range and soft_range + in range calculation + + Returns + ------- + Tuple containing the lower and upper bound """ dim = self.get_dimension(dim) if dim in self.kdims: diff --git a/holoviews/element/sankey.py b/holoviews/element/sankey.py index f98e2e543b..071ac24543 100644 --- a/holoviews/element/sankey.py +++ b/holoviews/element/sankey.py @@ -17,13 +17,13 @@ class _layout_sankey(Operation): - """ - Computes a Sankey diagram from a Graph element for internal use in + """Computes a Sankey diagram from a Graph element for internal use in the Sankey element constructor. Adapted from d3-sankey under BSD-3 license. - Source: https://github.com/d3/d3-sankey/tree/v0.12.3 + Source : https://github.com/d3/d3-sankey/tree/v0.12.3 + """ bounds = param.NumericTuple(default=(0, 0, 1000, 500)) @@ -76,9 +76,9 @@ def layout(self, element, **params): @classmethod def computeNodeLinks(cls, element, graph): - """ - Populate the sourceLinks and targetLinks for each node. + """Populate the sourceLinks and targetLinks for each node. Also, if the source and target are not objects, assume they are indices. + """ index = element.nodes.kdims[-1] node_map = {} @@ -102,8 +102,8 @@ def computeNodeLinks(cls, element, graph): @classmethod def computeNodeValues(cls, graph): - """ - Compute the value (size) of each node by summing the associated links. + """Compute the value (size) of each node by summing the associated links. + """ for node in graph['nodes']: source_val = np.sum([l['value'] for l in node['sourceLinks']]) @@ -306,7 +306,9 @@ def relaxLeftToRight(self, columns, alpha, beta, py): self.resolveCollisions(column, beta, py) def relaxRightToLeft(self, columns, alpha, beta, py): - """Reposition each node based on its outgoing (source) links.""" + """Reposition each node based on its outgoing (source) links. + + """ for column in columns[-2::-1]: for source in column: y = 0 @@ -402,9 +404,9 @@ def computePaths(self, graph): class Sankey(Graph): - """ - Sankey is an acyclic, directed Graph type that represents the flow + """Sankey is an acyclic, directed Graph type that represents the flow of some quantity between its nodes. 
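The Sankey element described above is typically built from (source, target, value) edges; a minimal sketch with made-up flows:

    import holoviews as hv

    edges = [('A', 'X', 5), ('A', 'Y', 7), ('B', 'Y', 3)]
    sankey = hv.Sankey(edges)   # node positions are computed by the internal _layout_sankey operation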
+ """ group = param.String(default='Sankey', constant=True) diff --git a/holoviews/element/selection.py b/holoviews/element/selection.py index 349c3ee667..7d0ca40b0d 100644 --- a/holoviews/element/selection.py +++ b/holoviews/element/selection.py @@ -1,6 +1,6 @@ -""" -Defines mix-in classes to handle support for linked brushing on +"""Defines mix-in classes to handle support for linked brushing on elements. + """ import sys @@ -215,9 +215,9 @@ def spatial_bounds_select(xvals, yvals, bounds): class Selection2DExpr(SelectionIndexExpr): - """ - Mixin class for Cartesian 2D elements to add basic support for + """Mixin class for Cartesian 2D elements to add basic support for SelectionExpr streams. + """ _selection_dims = 2 @@ -405,9 +405,9 @@ def _get_lasso_selection(self, x0dim, y0dim, x1dim, y1dim, geometry, **kwargs): class SelectionPolyExpr(Selection2DExpr): def _skip(self, **kwargs): - """ - Do not skip geometry selections until polygons support returning + """Do not skip geometry selections until polygons support returning indexes on lasso based selections. + """ skip = kwargs.get('index_cols') and self._index_skip and 'geometry' not in kwargs if skip: @@ -441,9 +441,9 @@ def _get_lasso_selection(self, xdim, ydim, geometry, **kwargs): class Selection1DExpr(Selection2DExpr): - """ - Mixin class for Cartesian 1D Chart elements to add basic support for + """Mixin class for Cartesian 1D Chart elements to add basic support for SelectionExpr streams. + """ _selection_dims = 1 diff --git a/holoviews/element/stats.py b/holoviews/element/stats.py index daaebf1113..ac24b118da 100644 --- a/holoviews/element/stats.py +++ b/holoviews/element/stats.py @@ -9,11 +9,11 @@ class StatisticsElement(Dataset, Element2D): - """ - StatisticsElement provides a baseclass for Element types that + """StatisticsElement provides a baseclass for Element types that compute statistics based on the input data, usually a density. The value dimension of such elements are therefore usually virtual and not computed until the element is plotted. + """ __abstract = True @@ -38,8 +38,8 @@ def __init__(self, data, kdims=None, vdims=None, **params): @property def dataset(self): - """ - The Dataset that this object was created from + """The Dataset that this object was created from + """ from . import Dataset if self._dataset is None: @@ -54,15 +54,20 @@ def dataset(self): def range(self, dim, data_range=True, dimension_range=True): """Return the lower and upper bounds of values along dimension. - Args: - dimension: The dimension to compute the range on. - data_range (bool): Compute range from data values - dimension_range (bool): Include Dimension ranges - Whether to include Dimension range and soft_range - in range calculation - - Returns: - Tuple containing the lower and upper bound + Parameters + ---------- + dimension + The dimension to compute the range on. + data_range : bool + Compute range from data values + dimension_range : bool + Include Dimension ranges + Whether to include Dimension range and soft_range + in range calculation + + Returns + ------- + Tuple containing the lower and upper bound """ iskdim = self.get_dimension(dim) not in self.vdims return super().range(dim, iskdim, dimension_range) @@ -70,18 +75,23 @@ def range(self, dim, data_range=True, dimension_range=True): def dimension_values(self, dim, expanded=True, flat=True): """Return the values along the requested dimension. 
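The range signature being converted above behaves the same way across elements; for example, with arbitrary data:

    import holoviews as hv

    curve = hv.Curve([(0, 1), (1, 4), (2, 2)])
    curve.range('x')                          # lower and upper bound along 'x': (0, 2)
    curve.range('y', dimension_range=False)   # data values only, ignoring declared Dimension ranges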
- Args: - dimension: The dimension to return values for - expanded (bool, optional): Whether to expand values - Whether to return the expanded values, behavior depends - on the type of data: - * Columnar: If false returns unique values - * Geometry: If false returns scalar values per geometry - * Gridded: If false returns 1D coordinates - flat (bool, optional): Whether to flatten array - - Returns: - NumPy array of values along the requested dimension + Parameters + ---------- + dimension + The dimension to return values for + expanded : bool, optional + Whether to expand values + Whether to return the expanded values, behavior depends + on the type of data: + * Columnar: If false returns unique values + * Geometry: If false returns scalar values per geometry + * Gridded: If false returns 1D coordinates + flat : bool, optional + Whether to flatten array + + Returns + ------- + NumPy array of values along the requested dimension """ dim = self.get_dimension(dim, strict=True) if dim in self.vdims: @@ -94,11 +104,14 @@ def get_dimension_type(self, dim): Type is determined by Dimension.type attribute or common type of the dimension values, otherwise None. - Args: - dimension: Dimension to look up by name or by index + Parameters + ---------- + dimension + Dimension to look up by name or by index - Returns: - Declared type of values along the dimension + Returns + ------- + Declared type of values along the dimension """ dim = self.get_dimension(dim) if dim is None: @@ -115,12 +128,16 @@ def dframe(self, dimensions=None, multi_index=False): Returns a pandas dataframe of columns along each dimension, either completely flat or indexed by key dimensions. - Args: - dimensions: Dimensions to return as columns - multi_index: Convert key dimensions to (multi-)index + Parameters + ---------- + dimensions + Dimensions to return as columns + multi_index + Convert key dimensions to (multi-)index - Returns: - DataFrame of columns corresponding to each dimension + Returns + ------- + DataFrame of columns corresponding to each dimension """ if dimensions: dimensions = [self.get_dimension(d, strict=True) for d in dimensions] @@ -139,11 +156,14 @@ def columns(self, dimensions=None): Returns a dictionary of column arrays along each dimension of the element. - Args: - dimensions: Dimensions to return as columns + Parameters + ---------- + dimensions + Dimensions to return as columns - Returns: - Dictionary of arrays for each dimension + Returns + ------- + Dictionary of arrays for each dimension """ if dimensions is None: dimensions = self.kdims @@ -158,10 +178,10 @@ def columns(self, dimensions=None): class Bivariate(Selection2DExpr, StatisticsElement): - """ - Bivariate elements are containers for two dimensional data, which + """Bivariate elements are containers for two dimensional data, which is to be visualized as a kernel density estimate. The data should be supplied in a tabular format of x- and y-columns. + """ group = param.String(default="Bivariate", constant=True) @@ -173,11 +193,11 @@ class Bivariate(Selection2DExpr, StatisticsElement): class Distribution(Selection1DExpr, StatisticsElement): - """ - Distribution elements provides a representation for a + """Distribution elements provides a representation for a one-dimensional distribution which can be visualized as a kernel density estimate. The data should be supplied in a tabular format and will use the first column. 
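The accessor methods whose docstrings are converted above can be exercised on any Dataset-backed element; a small sketch:

    import numpy as np
    import holoviews as hv

    scatter = hv.Scatter((np.arange(5), np.random.rand(5)))
    xs = scatter.dimension_values('x')   # NumPy array of values along 'x'
    df = scatter.dframe()                # pandas DataFrame with one column per dimension
    cols = scatter.columns(['x'])        # dict of arrays for the requested dimensions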
+ """ group = param.String(default='Distribution', constant=True) @@ -188,11 +208,11 @@ class Distribution(Selection1DExpr, StatisticsElement): class BoxWhisker(Selection1DExpr, Dataset, Element2D): - """ - BoxWhisker represent data as a distributions highlighting the + """BoxWhisker represent data as a distributions highlighting the median, mean and various percentiles. It may have a single value dimension and any number of key dimensions declaring the grouping of each violin. + """ group = param.String(default='BoxWhisker', constant=True) @@ -205,23 +225,23 @@ class BoxWhisker(Selection1DExpr, Dataset, Element2D): class Violin(BoxWhisker): - """ - Violin elements represent data as 1D distributions visualized + """Violin elements represent data as 1D distributions visualized as a kernel-density estimate. It may have a single value dimension and any number of key dimensions declaring the grouping of each violin. + """ group = param.String(default='Violin', constant=True) class HexTiles(Selection2DExpr, Dataset, Element2D): - """ - HexTiles is a statistical element with a visual representation + """HexTiles is a statistical element with a visual representation that renders a density map of the data values as a hexagonal grid. Before display the data is aggregated either by counting the values in each hexagonal bin or by computing aggregates. + """ group = param.String(default='HexTiles', constant=True) diff --git a/holoviews/element/tabular.py b/holoviews/element/tabular.py index a349298732..f2f0b069a8 100644 --- a/holoviews/element/tabular.py +++ b/holoviews/element/tabular.py @@ -8,14 +8,14 @@ class ItemTable(Element): - """ - A tabular element type to allow convenient visualization of either + """A tabular element type to allow convenient visualization of either a standard Python dictionary or a list of tuples (i.e. input suitable for an dict constructor). Tables store heterogeneous data with different labels. Dimension objects are also accepted as keys, allowing dimensional information (e.g. type and units) to be associated per heading. + """ kdims = param.List(default=[], bounds=(0, 0), doc=""" @@ -50,8 +50,8 @@ def __init__(self, data, **params): super().__init__(str_keys, **params) def __getitem__(self, heading): - """ - Get the value associated with the given heading (key). + """Get the value associated with the given heading (key). + """ if heading == (): return self @@ -82,8 +82,8 @@ def reduce(self, dimensions=None, function=None, **reduce_map): 'cannot be reduced.') def pprint_cell(self, row, col): - """ - Get the formatted cell value for the given row and column indices. + """Get the formatted cell value for the given row and column indices. + """ if col > 2: raise Exception("Only two columns available in a ItemTable.") @@ -101,9 +101,9 @@ def hist(self, *args, **kwargs): "don't support histograms.") def cell_type(self, row, col): - """ - Returns the cell type given a row and column index. The common + """Returns the cell type given a row and column index. The common basic cell types are 'data' and 'heading'. + """ if col == 0: return 'heading' else: return 'data' @@ -111,9 +111,9 @@ def cell_type(self, row, col): class Table(SelectionIndexExpr, Dataset, Tabular): - """ - Table is a Dataset type, which gets displayed in a tabular + """Table is a Dataset type, which gets displayed in a tabular format and is convertible to most other Element types. 
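A hedged sketch of the statistical elements described above, using synthetic data:

    import numpy as np
    import holoviews as hv

    values = np.random.randn(1000)
    dist = hv.Distribution(values)                 # 1D kernel density estimate
    biv = hv.Bivariate(np.random.randn(1000, 2))   # 2D kernel density estimate
    box = hv.BoxWhisker((np.repeat(['a', 'b'], 500), values), 'group', 'value')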
+ """ group = param.String(default='Table', constant=True, doc=""" diff --git a/holoviews/element/tiles.py b/holoviews/element/tiles.py index e9c01b61e5..372cb7c98d 100644 --- a/holoviews/element/tiles.py +++ b/holoviews/element/tiles.py @@ -9,8 +9,7 @@ class Tiles(Element2D): - """ - The Tiles element represents tile sources, specified as URL + """The Tiles element represents tile sources, specified as URL containing different template variables or xyzservices.TileProvider. These variables correspond to three different formats for specifying the spatial location and zoom level of the requested tiles: @@ -25,6 +24,7 @@ class Tiles(Element2D): defined as eastings and northings. Any data overlaid on a tile source therefore has to be defined in those coordinates or be projected (e.g. using GeoViews). + """ kdims = param.List(default=[Dimension('x'), Dimension('y')], @@ -51,23 +51,23 @@ def dimension_values(self, dimension, expanded=True, flat=True): @staticmethod def lon_lat_to_easting_northing(longitude, latitude): - """ - Projects the given longitude, latitude values into Web Mercator + """Projects the given longitude, latitude values into Web Mercator (aka Pseudo-Mercator or EPSG:3857) coordinates. See docstring for holoviews.util.transform.lon_lat_to_easting_northing for more information + """ return lon_lat_to_easting_northing(longitude, latitude) @staticmethod def easting_northing_to_lon_lat(easting, northing): - """ - Projects the given easting, northing values into + """Projects the given easting, northing values into longitude, latitude coordinates. See docstring for holoviews.util.transform.easting_northing_to_lon_lat for more information + """ return easting_northing_to_lon_lat(easting, northing) diff --git a/holoviews/element/util.py b/holoviews/element/util.py index bde652c71d..b6b55fadd5 100644 --- a/holoviews/element/util.py +++ b/holoviews/element/util.py @@ -29,9 +29,9 @@ def split_path(path): - """ - Split a Path type containing a single NaN separated path into + """Split a Path type containing a single NaN separated path into multiple subpaths. + """ path = path.split(0, 1)[0] values = path.dimension_values(0) @@ -48,10 +48,10 @@ def split_path(path): def compute_slice_bounds(slices, scs, shape): - """ - Given a 2D selection consisting of slices/coordinates, a + """Given a 2D selection consisting of slices/coordinates, a SheetCoordinateSystem and the shape of the array returns a new BoundingBox representing the sliced region. + """ xidx, yidx = slices ys, xs = shape @@ -100,8 +100,8 @@ def compute_slice_bounds(slices, scs, shape): def reduce_fn(x): - """ - Aggregation function to get the first non-zero value. + """Aggregation function to get the first non-zero value. + """ values = x.values if isinstance(x, pd.Series) else x for v in values: @@ -111,8 +111,7 @@ def reduce_fn(x): class categorical_aggregate2d(Operation): - """ - Generates a gridded Dataset of 2D aggregate arrays indexed by the + """Generates a gridded Dataset of 2D aggregate arrays indexed by the first two dimensions of the passed Element, turning all remaining dimensions into value dimensions. The key dimensions of the gridded array are treated as categorical indices. 
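The Web Mercator helpers on Tiles shown above are plain static methods; an illustrative round trip (coordinates are arbitrary):

    import holoviews as hv

    easting, northing = hv.Tiles.lon_lat_to_easting_northing(-74.0060, 40.7128)
    lon, lat = hv.Tiles.easting_northing_to_lon_lat(easting, northing)   # recovers the inputs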
Useful for data @@ -128,6 +127,7 @@ class categorical_aggregate2d(Operation): Dataset({'Country': ['USA', 'UK'], 'Year': [2000, 2005], 'Population': [[ 282.2 , np.nan], [np.nan, 58.89]]}, kdims=['Country', 'Year'], vdims=['Population']) + """ datatype = param.List(default=['xarray', 'grid'], doc=""" @@ -135,9 +135,9 @@ class categorical_aggregate2d(Operation): @classmethod def _get_coords(cls, obj: Dataset): - """ - Get the coordinates of the 2D aggregate, maintaining the correct + """Get the coordinates of the 2D aggregate, maintaining the correct sorting order. + """ xdim, ydim = obj.dimensions(label=True)[:2] xcoords = obj.dimension_values(xdim, False) @@ -173,9 +173,9 @@ def _get_coords(cls, obj: Dataset): return np.asarray(xcoords), np.asarray(ycoords) def _aggregate_dataset(self, obj): - """ - Generates a gridded Dataset from a column-based dataset and + """Generates a gridded Dataset from a column-based dataset and lists of xcoords and ycoords + """ xcoords, ycoords = self._get_coords(obj) dim_labels = obj.dimensions(label=True) @@ -227,10 +227,10 @@ def _aggregate_dataset_pandas(self, obj): return obj.clone(data, datatype=self.p.datatype, label=label) def _process(self, obj, key=None): - """ - Generates a categorical 2D aggregate by inserting NaNs at all + """Generates a categorical 2D aggregate by inserting NaNs at all cross-product locations that do not already have a value assigned. Returns a 2D gridded Dataset object. + """ if isinstance(obj, Dataset) and obj.interface.gridded: return obj @@ -245,8 +245,8 @@ def _process(self, obj, key=None): def circular_layout(nodes): - """ - Lay out nodes on a circle and add node index. + """Lay out nodes on a circle and add node index. + """ N = len(nodes) if not N: @@ -258,9 +258,9 @@ def circular_layout(nodes): def quadratic_bezier(start, end, c0=(0, 0), c1=(0, 0), steps=50): - """ - Compute quadratic bezier spline given start and end coordinate and + """Compute quadratic bezier spline given start and end coordinate and two control points. + """ steps = np.linspace(0, 1, steps) sx, sy = start @@ -275,11 +275,11 @@ def quadratic_bezier(start, end, c0=(0, 0), c1=(0, 0), steps=50): def connect_edges_pd(graph): - """ - Given a Graph element containing abstract edges compute edge + """Given a Graph element containing abstract edges compute edge segments directly connecting the source and target nodes. This operation depends on pandas and is a lot faster than the pure NumPy equivalent. + """ edges = graph.dframe() edges.index.name = 'graph_edge_index' @@ -301,11 +301,11 @@ def connect_edges_pd(graph): def connect_tri_edges_pd(trimesh): - """ - Given a TriMesh element containing abstract edges compute edge + """Given a TriMesh element containing abstract edges compute edge segments directly connecting the source and target nodes. This operation depends on pandas and is a lot faster than the pure NumPy equivalent. + """ edges = trimesh.dframe().copy() edges.index.name = 'trimesh_edge_index' @@ -327,11 +327,11 @@ def connect_tri_edges_pd(trimesh): def connect_edges(graph): - """ - Given a Graph element containing abstract edges compute edge + """Given a Graph element containing abstract edges compute edge segments directly connecting the source and target nodes. This operation just uses internal HoloViews operations and will be a lot slower than the pandas equivalent. 
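The categorical_aggregate2d operation can be applied directly to a tabular Dataset, mirroring the Country/Year example in its docstring:

    import holoviews as hv
    from holoviews.element.util import categorical_aggregate2d

    table = hv.Dataset([('USA', 2000, 282.2), ('UK', 2005, 58.89)],
                       kdims=['Country', 'Year'], vdims=['Population'])
    gridded = categorical_aggregate2d(table)   # 2D grid with NaNs at missing Country/Year pairs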
+ """ paths = [] for start, end in graph.array(graph.kdims): diff --git a/holoviews/ipython/__init__.py b/holoviews/ipython/__init__.py index c2ff66a2fc..0d720da03b 100644 --- a/holoviews/ipython/__init__.py +++ b/holoviews/ipython/__init__.py @@ -21,17 +21,17 @@ AttrTree._disabled_prefixes = ['_repr_','_ipython_canary_method_should_not_exist'] def show_traceback(): - """ - Display the full traceback after an abbreviated traceback has occurred. + """Display the full traceback after an abbreviated traceback has occurred. + """ from .display_hooks import FULL_TRACEBACK print(FULL_TRACEBACK) class IPTestCase(ComparisonTestCase): - """ - This class extends ComparisonTestCase to handle IPython specific + """This class extends ComparisonTestCase to handle IPython specific objects and support the execution of cells and magic. + """ def setUp(self): @@ -59,23 +59,29 @@ def get_object(self, name): def cell(self, line): - "Run an IPython cell" + """Run an IPython cell + + """ self.ip.run_cell(line, silent=True) def cell_magic(self, *args, **kwargs): - "Run an IPython cell magic" + """Run an IPython cell magic + + """ self.ip.run_cell_magic(*args, **kwargs) def line_magic(self, *args, **kwargs): - "Run an IPython line magic" + """Run an IPython line magic + + """ self.ip.run_line_magic(*args, **kwargs) class notebook_extension(extension): - """ - Notebook specific extension to hv.extension that offers options for + """Notebook specific extension to hv.extension that offers options for controlling the notebook environment. + """ css = param.String(default='', doc="Optional CSS rule set to apply to the notebook.") @@ -204,7 +210,9 @@ def __call__(self, *args, **params): @classmethod def completions_sorting_key(cls, word): - "Fixed version of IPyton.completer.completions_sorting_key" + """Fixed version of IPyton.completer.completions_sorting_key + + """ prio1, prio2 = 0, 0 if word.startswith('__'): prio1 = 2 elif word.startswith('_'): prio1 = 1 @@ -219,9 +227,9 @@ def completions_sorting_key(cls, word): def _get_resources(self, args, params): - """ - Finds the list of resources from the keyword parameters and pops + """Finds the list of resources from the keyword parameters and pops them out of the params dictionary. + """ resources = [] disabled = [] @@ -248,8 +256,8 @@ def _get_resources(self, args, params): @classmethod def load_logo(cls, logo=False, bokeh_logo=False, mpl_logo=False, plotly_logo=False): - """ - Allow to display Holoviews' logo and the plotting extensions' logo. + """Allow to display Holoviews' logo and the plotting extensions' logo. + """ import jinja2 diff --git a/holoviews/ipython/archive.py b/holoviews/ipython/archive.py index 8155e4c0a1..415fffbcd5 100644 --- a/holoviews/ipython/archive.py +++ b/holoviews/ipython/archive.py @@ -1,6 +1,6 @@ -""" -Implements NotebookArchive used to automatically capture notebook data +"""Implements NotebookArchive used to automatically capture notebook data and export it to disk via the display hooks. + """ import os @@ -20,11 +20,12 @@ class NotebookArchive(FileArchive): - """ - FileArchive that can automatically capture notebook data via the + """FileArchive that can automatically capture notebook data via the display hooks and automatically adds a notebook HTML snapshot to the archive upon export. + """ + exporters = param.List(default=[Pickler]) skip_notebook_export = param.Boolean(default=False, doc=""" @@ -68,8 +69,8 @@ def __init__(self, **params): def get_namespace(self): - """ - Find the name the user is using to access holoviews. 
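In a notebook the extension documented above is loaded once per session; a typical invocation (the logo flag is optional):

    import holoviews as hv

    hv.extension('bokeh', logo=True)   # inside Jupyter this resolves to the notebook_extension above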
+ """Find the name the user is using to access holoviews. + """ if 'holoviews' not in sys.modules: raise ImportError('HoloViews does not seem to be imported') @@ -81,7 +82,9 @@ def get_namespace(self): def last_export_status(self): - "Helper to show the status of the last call to the export method." + """Helper to show the status of the last call to the export method. + + """ if self.export_success is True: print("The last call to holoviews.archive.export was successful.") return @@ -97,9 +100,9 @@ def last_export_status(self): def auto(self, enabled=True, clear=False, **kwargs): - """ - Method to enable or disable automatic capture, allowing you to + """Method to enable or disable automatic capture, allowing you to simultaneously set the instance parameters. + """ self.namespace = self.get_namespace() self.notebook_name = "{notebook}" @@ -120,8 +123,8 @@ def auto(self, enabled=True, clear=False, **kwargs): tstamp if enabled else '')) def export(self, timestamp=None): - """ - Get the current notebook data and export. + """Get the current notebook data and export. + """ if self._timestamp is None: raise Exception("No timestamp set. Has the archive been initialized?") @@ -150,7 +153,9 @@ def export(self, timestamp=None): def add(self, obj=None, filename=None, data=None, info=None, html=None): - "Similar to FileArchive.add but accepts html strings for substitution" + """Similar to FileArchive.add but accepts html strings for substitution + + """ if info is None: info = {} initial_last_key = list(self._files.keys())[-1] if len(self) else None @@ -189,7 +194,9 @@ def _clear_notebook(self, node): # pragma: no cover def _export_with_html(self): # pragma: no cover - "Computes substitutions before using nbconvert with preprocessors" + """Computes substitutions before using nbconvert with preprocessors + + """ self.export_success = False try: tstamp = time.strftime(self.timestamp_format, self._timestamp) @@ -240,7 +247,9 @@ def _export_with_html(self): # pragma: no cover self.export_success = True def _get_notebook_node(self): # pragma: no cover - "Load captured notebook node" + """Load captured notebook node + + """ size = len(self._notebook_data) if size == 0: raise Exception("Captured buffer size for notebook node is zero.") diff --git a/holoviews/ipython/display_hooks.py b/holoviews/ipython/display_hooks.py index 23ac45c705..f803743413 100644 --- a/holoviews/ipython/display_hooks.py +++ b/holoviews/ipython/display_hooks.py @@ -1,5 +1,5 @@ -""" -Definition and registration of display hooks for the IPython Notebook. +"""Definition and registration of display hooks for the IPython Notebook. + """ import sys import traceback @@ -51,7 +51,9 @@ def max_frame_warning(max_frames): ) def process_object(obj): - "Hook to process the object currently being displayed." + """Hook to process the object currently being displayed. + + """ invalid_options = OptsMagic.process_element(obj) if invalid_options: return invalid_options OutputMagic.info(obj) @@ -77,8 +79,8 @@ def render(obj, **kwargs): def single_frame_plot(obj): - """ - Returns plot, renderer and format for single frame export. + """Returns plot, renderer and format for single frame export. 
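The archive workflow sketched in the docstrings above (enable automatic capture, then export) is driven from the notebook roughly as follows:

    import holoviews as hv

    hv.archive.auto()                 # start capturing displayed output
    # ... run cells that display HoloViews objects ...
    hv.archive.export()               # write captured data plus an HTML snapshot of the notebook
    hv.archive.last_export_status()   # report whether the last export succeeded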
+ """ obj = Layout(obj) if isinstance(obj, AdjointLayout) else obj @@ -93,20 +95,26 @@ def single_frame_plot(obj): def first_frame(obj): - "Only display the first frame of an animated plot" + """Only display the first frame of an animated plot + + """ plot, renderer, fmt = single_frame_plot(obj) plot.update(0) return {'text/html': renderer.html(plot, fmt)} def middle_frame(obj): - "Only display the (approximately) middle frame of an animated plot" + """Only display the (approximately) middle frame of an animated plot + + """ plot, renderer, fmt = single_frame_plot(obj) middle_frame = int(len(plot) / 2) plot.update(middle_frame) return {'text/html': renderer.html(plot, fmt)} def last_frame(obj): - "Only display the last frame of an animated plot" + """Only display the last frame of an animated plot + + """ plot, renderer, fmt = single_frame_plot(obj) plot.update(len(plot)) return {'text/html': renderer.html(plot, fmt)} @@ -133,10 +141,10 @@ def option_state(element): def display_hook(fn): - """ - A decorator to wrap display hooks that return a MIME bundle or None. + """A decorator to wrap display hooks that return a MIME bundle or None. Additionally it handles adding output to the notebook archive, saves files specified with the output magic and handles tracebacks. + """ @wraps(fn) def wrapped(element): @@ -236,10 +244,10 @@ def grid_display(grid, max_frames): def display(obj, raw_output=False, **kwargs): - """ - Renders any HoloViews object to HTML and displays it + """Renders any HoloViews object to HTML and displays it using the IPython display function. If raw is enabled the raw HTML is returned instead of displaying it directly. + """ if not Store.loaded_backends() and isinstance(obj, Dimensioned): raise RuntimeError('To use display on a HoloViews object ensure ' @@ -288,9 +296,9 @@ def pprint_display(obj): def image_display(element, max_frames, fmt): - """ - Used to render elements to an image format (svg or png) if requested + """Used to render elements to an image format (svg or png) if requested in the display formats. + """ if fmt not in Store.display_formats: return None @@ -315,16 +323,16 @@ def image_display(element, max_frames, fmt): @display_hook def png_display(element, max_frames): - """ - Used to render elements to PNG if requested in the display formats. + """Used to render elements to PNG if requested in the display formats. + """ return image_display(element, max_frames, fmt='png') @display_hook def svg_display(element, max_frames): - """ - Used to render elements to SVG if requested in the display formats. + """Used to render elements to SVG if requested in the display formats. + """ return image_display(element, max_frames, fmt='svg') diff --git a/holoviews/ipython/magics.py b/holoviews/ipython/magics.py index 99bc2ae187..561e391b17 100644 --- a/holoviews/ipython/magics.py +++ b/holoviews/ipython/magics.py @@ -40,8 +40,8 @@ def info(cls, obj): @classmethod def pprint(cls): - """ - Pretty print the current element options + """Pretty print the current element options + """ current, count = '', 0 for k,v in Store.output_settings.options.items(): @@ -101,9 +101,9 @@ def warnfn(msg): @magics_class class CompositorMagic(Magics): - """ - Magic allowing easy definition of compositor operations. + """Magic allowing easy definition of compositor operations. Consult %compositor? for more information. + """ def __init__(self, *args, **kwargs): @@ -146,14 +146,17 @@ def option_completer(cls, k,v): class OptsCompleter: + """Implements the TAB-completion for the %%opts magic. 
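The display function converted above can also be called directly; a minimal sketch, assuming a loaded plotting backend:

    import holoviews as hv
    from holoviews.ipython.display_hooks import display

    hv.extension('bokeh')
    curve = hv.Curve([1, 2, 3])
    display(curve)                           # render and show via IPython
    html = display(curve, raw_output=True)   # return the raw HTML instead of displaying it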
+ """ - Implements the TAB-completion for the %%opts magic. - """ + _completions = {} # Contains valid plot and style keywords per Element @classmethod def setup_completer(cls): - "Get the dictionary of valid completions" + """Get the dictionary of valid completions + + """ try: for element in Store.options().keys(): options = Store.options()['.'.join(element)] @@ -167,9 +170,9 @@ def setup_completer(cls): @classmethod def dotted_completion(cls, line, sorted_keys, compositor_defs): - """ - Supply the appropriate key in Store.options and supply + """Supply the appropriate key in Store.options and supply suggestions for further completion. + """ completion_key, suggestions = None, [] tokens = [t for t in reversed(line.replace('.', ' ').split())] @@ -201,7 +204,9 @@ def _inside_delims(cls, line, opener, closer): @classmethod def option_completer(cls, k,v): - "Tab completion hook for the %%opts cell magic." + """Tab completion hook for the %%opts cell magic. + + """ line = v.text_until_cursor completions = cls.setup_completer() compositor_defs = {el.group:el.output_type.__name__ @@ -236,22 +241,23 @@ def line_completer(cls, line, completions, compositor_defs): @magics_class class OptsMagic(Magics): - """ - Magic for easy customising of normalization, plot and style options. + """Magic for easy customising of normalization, plot and style options. Consult %%opts? for more information. + """ + error_message = None # If not None, the error message that will be displayed opts_spec = None # Next id to propagate, binding displayed object together. strict = False @classmethod def process_element(cls, obj): - """ - To be called by the display hook which supplies the element to + """To be called by the display hook which supplies the element to be displayed. Any customisation of the object can then occur before final display. If there is any error, a HTML message may be returned. If None is returned, display will proceed as normal. + """ if cls.error_message: if cls.strict: @@ -273,9 +279,9 @@ def register_custom_spec(cls, spec): @classmethod def _partition_lines(cls, line, cell): - """ - Check the code for additional use of %%opts. Enables + """Check the code for additional use of %%opts. Enables multi-line use of %%opts in a single call to the magic. + """ if cell is None: return (line, cell) specs, code = [line], [] @@ -289,15 +295,14 @@ def _partition_lines(cls, line, cell): @line_cell_magic def opts(self, line='', cell=None): - """ - The opts line/cell magic with tab-completion. + """The opts line/cell magic with tab-completion. %%opts [ [path] [normalization] [plotting options] [style options]]+ - path: A dotted type.group.label specification + path : A dotted type.group.label specification (e.g. Image.Grayscale.Photo) - normalization: List of normalization options delimited by braces. + normalization : List of normalization options delimited by braces. One of | -axiswise | -framewise | +axiswise | +framewise | E.g. { +axiswise +framewise } @@ -313,6 +318,7 @@ def opts(self, line='', cell=None): More information may be found in the class docstring of util.parser.OptsSpec. + """ line, cell = self._partition_lines(line, cell) try: @@ -354,11 +360,11 @@ def opts(self, line='', cell=None): @magics_class class TimerMagic(Magics): - """ - A line magic for measuring the execution time of multiple cells. + """A line magic for measuring the execution time of multiple cells. After you start/reset the timer with '%timer start' you may view elapsed time with any subsequent calls to %timer. 
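The %%opts grammar summarised above combines a path, normalization flags in braces, plot options in square brackets and style options in parentheses; an illustrative cell where photo is an assumed Image variable and the option names are placeholders:

    %%opts Image.Grayscale.Photo { +axiswise } [xaxis=None] (cmap='gray')
    photo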
+ """ start_time = None @@ -376,8 +382,7 @@ def option_completer(cls, k,v): @line_magic def timer(self, line=''): - """ - Timer magic to print initial date/time information and + """Timer magic to print initial date/time information and subsequent elapsed time intervals. To start the timer, run: @@ -389,6 +394,7 @@ def timer(self, line=''): Subsequent calls to %timer will print the elapsed time relative to the time when %timer start was called. Subsequent calls to %timer start may also be used to reset the timer. + """ if line.strip() not in ['', 'start']: print("Invalid argument to %timer. For more information consult %timer?") diff --git a/holoviews/ipython/preprocessors.py b/holoviews/ipython/preprocessors.py index 3a20a659e6..2ac3cac990 100644 --- a/holoviews/ipython/preprocessors.py +++ b/holoviews/ipython/preprocessors.py @@ -1,7 +1,7 @@ -""" -Prototype demo: +"""Prototype demo: python holoviews/ipython/convert.py Conversion_Example.ipynb | python + """ import ast @@ -9,9 +9,9 @@ def comment_out_magics(source): - """ - Utility used to make sure AST parser does not choke on unrecognized + """Utility used to make sure AST parser does not choke on unrecognized magics. + """ filtered = [] for line in source.splitlines(): @@ -23,12 +23,12 @@ def comment_out_magics(source): def wrap_cell_expression(source, template='{expr}'): - """ - If a cell ends in an expression that could be displaying a HoloViews + """If a cell ends in an expression that could be displaying a HoloViews object (as determined using the AST), wrap it with a given prefix and suffix string. If the cell doesn't end in an expression, return the source unchanged. + """ cell_output_types = (ast.IfExp, ast.BoolOp, ast.BinOp, ast.Call, ast.Name, ast.Attribute) @@ -55,12 +55,12 @@ def wrap_cell_expression(source, template='{expr}'): def filter_magic(source, magic, strip=True): - """ - Given the source of a cell, filter out the given magic and collect + """Given the source of a cell, filter out the given magic and collect the lines using the magic into a list. If strip is True, the IPython syntax part of the magic (e.g. %magic or %%magic) is stripped from the returned lines. + """ filtered, magic_lines=[],[] for line in source.splitlines(): @@ -74,8 +74,8 @@ def filter_magic(source, magic, strip=True): def strip_magics(source): - """ - Given the source of a cell, filter out all cell and line magics. + """Given the source of a cell, filter out all cell and line magics. + """ filtered=[] for line in source.splitlines(): @@ -85,9 +85,9 @@ def strip_magics(source): def replace_line_magic(source, magic, template='{line}'): - """ - Given a cell's source, replace line magics using a formatting + """Given a cell's source, replace line magics using a formatting template, where {line} is the string that follows the magic. + """ filtered = [] for line in source.splitlines(): @@ -101,9 +101,9 @@ def replace_line_magic(source, magic, template='{line}'): class OptsMagicProcessor(Preprocessor): - """ - Preprocessor to convert notebooks to Python source to convert use of + """Preprocessor to convert notebooks to Python source to convert use of opts magic to use the util.opts utility instead. 
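The preprocessing helpers above operate on plain cell source strings; a small sketch of how they compose:

    from holoviews.ipython.preprocessors import strip_magics, wrap_cell_expression

    source = "%%opts Curve [show_grid=True]\ncurve = hv.Curve([1, 2, 3])\ncurve"
    plain = strip_magics(source)                                        # drops the %%opts line
    wrapped = wrap_cell_expression(plain, template="display({expr})")   # wraps the trailing expression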
+ """ def preprocess_cell(self, cell, resources, index): @@ -123,9 +123,9 @@ def __call__(self, nb, resources): return self.preprocess(nb,resources) class OutputMagicProcessor(Preprocessor): - """ - Preprocessor to convert notebooks to Python source to convert use of + """Preprocessor to convert notebooks to Python source to convert use of output magic to use the util.output utility instead. + """ def preprocess_cell(self, cell, resources, index): @@ -144,10 +144,10 @@ def __call__(self, nb, resources): return self.preprocess(nb,resources) class StripMagicsProcessor(Preprocessor): - """ - Preprocessor to convert notebooks to Python source to strips out all + """Preprocessor to convert notebooks to Python source to strips out all magics. To be applied after the preprocessors that can handle holoviews magics appropriately. + """ def preprocess_cell(self, cell, resources, index): @@ -159,8 +159,7 @@ def __call__(self, nb, resources): return self.preprocess(nb,resources) class Substitute(Preprocessor): - """ - An nbconvert preprocessor that substitutes one set of HTML data + """An nbconvert preprocessor that substitutes one set of HTML data output for another, adding annotation to the output as required. The constructor accepts the notebook format version and a @@ -169,7 +168,9 @@ class Substitute(Preprocessor): {source_html:(target_html, annotation)} Where the annotation may be None (i.e. no annotation). + """ + annotation = '
%s
' def __init__(self, version, substitutions, **kw): @@ -182,7 +183,9 @@ def __call__(self, nb, resources): # Temporary hack around 'enabled' flag def replace(self, src): - "Given some source html substitute and annotated as applicable" + """Given some source html substitute and annotated as applicable + + """ for html in self.substitutions.keys(): if src == html: annotation = self.annotation % self.substitutions[src][1] diff --git a/holoviews/ipython/widgets.py b/holoviews/ipython/widgets.py index 72f2f49b10..0ed8ddad8a 100644 --- a/holoviews/ipython/widgets.py +++ b/holoviews/ipython/widgets.py @@ -9,12 +9,12 @@ class ProgressBar(ProgressIndicator): - """ - A simple text progress bar suitable for both the IPython notebook + """A simple text progress bar suitable for both the IPython notebook and the IPython interactive prompt. ProgressBars are automatically nested if a previous instantiated progress bars has not achieved 100% completion. + """ display = param.Selector(default='stdout', @@ -52,8 +52,9 @@ def __init__(self, **params): super().__init__(**params) def __call__(self, percentage): - " Update the progress bar within the specified percent_range" + """Update the progress bar within the specified percent_range + """ if self.start_time is None: self.start_time = time.time() span = (self.percent_range[1]-self.percent_range[0]) percentage = self.percent_range[0] + ((percentage/100.0) * span) @@ -115,9 +116,9 @@ def _get_socket(self, min_port=8080, max_port=8100, max_tries=20): class RemoteProgress(ProgressBar): - """ - Connect to a progress bar in a separate process with output_mode + """Connect to a progress bar in a separate process with output_mode set to 'broadcast' in order to display the results (to stdout). + """ hostname=param.String(default='localhost', doc=""" @@ -152,8 +153,7 @@ def __call__(self): class RunProgress(ProgressBar): - """ - RunProgress breaks up the execution of a slow running command so + """RunProgress breaks up the execution of a slow running command so that the level of completion can be displayed during execution. This class is designed to run commands that take a single numeric @@ -165,6 +165,7 @@ class RunProgress(ProgressBar): For instance, this is suitable for simulations where the numeric argument is the simulated time - typically, advancing 10 simulated seconds takes about twice as long as advancing by 5 seconds. + """ interval = param.Number(default=100, doc=""" @@ -180,9 +181,9 @@ def __init__(self, **params): super().__init__(**params) def __call__(self, value): - """ - Execute the run_hook to a total of value, breaking up progress + """Execute the run_hook to a total of value, breaking up progress updates by the value specified by interval. + """ completed = 0 while (value - completed) >= self.interval: @@ -196,12 +197,12 @@ def __call__(self, value): def progress(iterator, enum=False, length=None): - """ - A helper utility to display a progress bar when iterating over a + """A helper utility to display a progress bar when iterating over a collection of a fixed length or a generator (with a declared length). If enum=True, then equivalent to enumerate with a progress bar. 
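A hedged sketch of the progress utilities documented above, run from the notebook or an interactive prompt:

    import time
    from holoviews.ipython.widgets import ProgressBar, progress

    pb = ProgressBar()
    for pct in range(0, 101, 20):      # __call__ takes a completion percentage
        pb(pct)
        time.sleep(0.1)

    for item in progress(range(50)):   # wraps a fixed-length iterable with a progress bar
        pass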
+ """ progress = ProgressBar() length = len(iterator) if length is None else length diff --git a/holoviews/operation/datashader.py b/holoviews/operation/datashader.py index 31c241c5fa..ee8af9abc0 100644 --- a/holoviews/operation/datashader.py +++ b/holoviews/operation/datashader.py @@ -78,9 +78,9 @@ class AggregationOperation(ResampleOperation2D): - """ - AggregationOperation extends the ResampleOperation2D defining an + """AggregationOperation extends the ResampleOperation2D defining an aggregator parameter used to define a datashader Reduction. + """ aggregator = param.ClassSelector(class_=(rd.Reduction, rd.summary, str), @@ -259,8 +259,7 @@ def has_by(state): class aggregate(LineAggregationOperation): - """ - aggregate implements 2D binning for any valid HoloViews Element + """aggregate implements 2D binning for any valid HoloViews Element type using datashader. I.e., this operation turns a HoloViews Element or overlay of Elements into an Image or an overlay of Images by rasterizing it. This allows quickly aggregating large @@ -281,13 +280,14 @@ class aggregate(LineAggregationOperation): is used dynamically, which means that the height and width will automatically be set to match the inner dimensions of the linked plot. + """ @classmethod def get_agg_data(cls, obj, category=None): - """ - Reduces any Overlay or NdOverlay of Elements into a single + """Reduces any Overlay or NdOverlay of Elements into a single xarray Dataset that can be aggregated. + """ paths = [] if isinstance(obj, Graph): @@ -490,11 +490,12 @@ def _apply_datashader(self, dfdata, cvs_fn, agg_fn, agg_kwargs, x, y, agg_state: return agg class curve_aggregate(aggregate): - """ - Optimized aggregation for Curve objects by setting the default + """Optimized aggregation for Curve objects by setting the default of the aggregator to self_intersect=False to be more consistent with the appearance of non-aggregated curves. + """ + aggregator = param.ClassSelector(class_=(rd.Reduction, rd.summary, str), default=rd.count(self_intersect=False), doc=""" Datashader reduction function used for aggregating the data. @@ -503,14 +504,14 @@ class curve_aggregate(aggregate): will be used. May also be defined as a string.""") class overlay_aggregate(aggregate): - """ - Optimized aggregation for NdOverlay objects by aggregating each + """Optimized aggregation for NdOverlay objects by aggregating each Element in an NdOverlay individually avoiding having to concatenate items in the NdOverlay. Works by summing sum and count aggregates and applying appropriate masking for NaN values. Mean aggregation is also supported by dividing sum and count aggregates. count_cat aggregates are grouped by the categorical dimension and a separate aggregate for each category is generated. + """ @classmethod @@ -618,10 +619,10 @@ def _process(self, element, key=None): class area_aggregate(AggregationOperation): - """ - Aggregates Area elements by filling the area between zero and + """Aggregates Area elements by filling the area between zero and the y-values if only one value dimension is defined and the area between the curves if two are provided. + """ def _process(self, element, key=None): @@ -663,9 +664,9 @@ def _process(self, element, key=None): class spread_aggregate(area_aggregate): - """ - Aggregates Spread elements by filling the area between the lower + """Aggregates Spread elements by filling the area between the lower and upper error band. 
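The aggregate operation described above is usually applied to a large element; a minimal sketch with random data and the bokeh backend assumed:

    import numpy as np
    import holoviews as hv
    from holoviews.operation.datashader import aggregate

    hv.extension('bokeh')
    points = hv.Points(np.random.randn(100_000, 2))
    img = aggregate(points, aggregator='count', width=400, height=400,
                    dynamic=False)   # Image of per-pixel counts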
+ """ def _process(self, element, key=None): @@ -684,10 +685,10 @@ def _process(self, element, key=None): class spikes_aggregate(LineAggregationOperation): - """ - Aggregates Spikes elements by drawing individual line segments + """Aggregates Spikes elements by drawing individual line segments over the entire y_range if no value dimension is defined and between zero and the y-value if one is defined. + """ spike_length = param.Number(default=None, allow_None=True, doc=""" @@ -761,8 +762,8 @@ def _process(self, element, key=None): class geom_aggregate(AggregationOperation): - """ - Baseclass for aggregation of Geom elements. + """Baseclass for aggregation of Geom elements. + """ __abstract = True @@ -820,8 +821,8 @@ def _process(self, element, key=None): class segments_aggregate(geom_aggregate, LineAggregationOperation): - """ - Aggregates Segments elements. + """Aggregates Segments elements. + """ def _aggregate(self, cvs, df, x0, y0, x1, y1, agg_fn): @@ -833,8 +834,8 @@ def _aggregate(self, cvs, df, x0, y0, x1, y1, agg_fn): class rectangle_aggregate(geom_aggregate): - """ - Aggregates Rectangle elements. + """Aggregates Rectangle elements. + """ def _aggregate(self, cvs, df, x0, y0, x1, y1, agg_fn): @@ -843,14 +844,14 @@ def _aggregate(self, cvs, df, x0, y0, x1, y1, agg_fn): class regrid(AggregationOperation): - """ - regrid allows resampling a HoloViews Image type using specified + """regrid allows resampling a HoloViews Image type using specified up- and downsampling functions defined using the aggregator and interpolation parameters respectively. By default upsampling is disabled to avoid unnecessarily upscaling an image that has to be sent to the browser. Also disables expanding the image beyond its original bounds avoiding unnecessarily padding the output array with NaN values. + """ aggregator = param.ClassSelector(default=rd.mean(), @@ -982,10 +983,10 @@ def _process(self, element, key=None): class contours_rasterize(aggregate): - """ - Rasterizes the Contours element by weighting the aggregation by + """Rasterizes the Contours element by weighting the aggregation by the iso-contour levels if a value dimension is defined, otherwise default to any aggregator. + """ aggregator = param.ClassSelector(default=rd.mean(), @@ -1000,11 +1001,11 @@ def _get_aggregator(cls, element, agg, add_field=True): class trimesh_rasterize(aggregate): - """ - Rasterize the TriMesh element using the supplied aggregator. If + """Rasterize the TriMesh element using the supplied aggregator. If the TriMesh nodes or edges define a value dimension, will plot filled and shaded polygons; otherwise returns a wiremesh of the data. + """ aggregator = param.ClassSelector(default=rd.mean(), @@ -1129,10 +1130,10 @@ def _process(self, element, key=None): class quadmesh_rasterize(trimesh_rasterize): - """ - Rasterize the QuadMesh element using the supplied aggregator. + """Rasterize the QuadMesh element using the supplied aggregator. Simply converts to a TriMesh and lets trimesh_rasterize handle the actual rasterization. + """ def _precompute(self, element, agg): @@ -1182,8 +1183,7 @@ def _process(self, element, key=None): class shade(LinkableOperation): - """ - shade applies a normalization function followed by colormapping to + """shade applies a normalization function followed by colormapping to an Image or NdOverlay of Images, returning an RGB Element. The data must be in the form of a 2D or 3D DataArray, but NdOverlays of 2D Images will be automatically converted to a 3D array. 
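regrid, documented above, resamples an existing Image rather than aggregating raw data; a sketch with an arbitrary synthetic image:

    import numpy as np
    import holoviews as hv
    from holoviews.operation.datashader import regrid

    image = hv.Image(np.random.rand(1000, 1000), bounds=(0, 0, 1, 1))
    smaller = regrid(image, width=150, height=150,
                     dynamic=False)   # downsampled with the default mean aggregator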
@@ -1192,6 +1192,7 @@ class shade(LinkableOperation): array representing categorical aggregates will be supplied a color key for each category. The colormap (cmap) for the 2D case may be supplied as an Iterable or a Callable. + """ alpha = param.Integer(default=255, bounds=(0, 255), doc=""" @@ -1244,9 +1245,9 @@ class shade(LinkableOperation): @classmethod def concatenate(cls, overlay): - """ - Concatenates an NdOverlay of Image types into a single 3D + """Concatenates an NdOverlay of Image types into a single 3D xarray Dataset. + """ if not isinstance(overlay, NdOverlay): raise ValueError('Only NdOverlays can be concatenated') @@ -1260,8 +1261,8 @@ def concatenate(cls, overlay): @classmethod def uint32_to_uint8(cls, img): - """ - Cast uint32 RGB image to 4 uint8 channels. + """Cast uint32 RGB image to 4 uint8 channels. + """ new_array = np.flipud(img.view(dtype=np.uint8).reshape((*img.shape, 4))) new_array[new_array[:,:,3] == 0] = 0 # Set alpha 0 to 0 for all dimension @@ -1270,8 +1271,8 @@ def uint32_to_uint8(cls, img): @classmethod def uint32_to_uint8_xr(cls, img): - """ - Cast uint32 xarray DataArray to 4 uint8 channels. + """Cast uint32 xarray DataArray to 4 uint8 channels. + """ new_array = img.values.view(dtype=np.uint8).reshape((*img.shape, 4)) new_array[new_array[:,:,3] == 0] = 0 @@ -1281,8 +1282,8 @@ def uint32_to_uint8_xr(cls, img): @classmethod def rgb2hex(cls, rgb): - """ - Convert RGB(A) tuple to hex. + """Convert RGB(A) tuple to hex. + """ if len(rgb) > 3: rgb = rgb[:-1] @@ -1435,8 +1436,8 @@ def add_selector_data(cls, *, img_data, sel_data): class geometry_rasterize(LineAggregationOperation): - """ - Rasterizes geometries by converting them to spatialpandas. + """Rasterizes geometries by converting them to spatialpandas. + """ aggregator = param.ClassSelector(default=rd.mean(), @@ -1504,8 +1505,7 @@ def _process(self, element, key=None): class rasterize(AggregationOperation): - """ - Rasterize is a high-level operation that will rasterize any + """Rasterize is a high-level operation that will rasterize any Element or combination of Elements, aggregating them with the supplied aggregator and interpolation method. @@ -1524,6 +1524,7 @@ class rasterize(AggregationOperation): operation is used dynamically, which means that the width, height, x_range and y_range will automatically be set to match the inner dimensions of the linked plot and the ranges of the axes. + """ aggregator = param.ClassSelector(class_=(rd.Reduction, rd.summary, str), @@ -1608,12 +1609,12 @@ def _process(self, element, key=None): class datashade(rasterize, shade): - """ - Applies the aggregate and shade operations, aggregating all + """Applies the aggregate and shade operations, aggregating all elements in the supplied object and then applying normalization and colormapping the aggregated data returning RGB elements. See aggregate and shade operations for more details. + """ def _process(self, element, key=None): @@ -1624,9 +1625,9 @@ def _process(self, element, key=None): class stack(Operation): - """ - The stack operation allows compositing multiple RGB Elements using + """The stack operation allows compositing multiple RGB Elements using the defined compositing operator. 
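As the docstrings above note, datashade is effectively rasterize followed by shade; a hedged sketch of that equivalence:

    import numpy as np
    import holoviews as hv
    from holoviews.operation.datashader import datashade, rasterize, shade

    points = hv.Points(np.random.randn(100_000, 2))
    rgb_direct = datashade(points, cmap='viridis')
    rgb_manual = shade(rasterize(points), cmap='viridis')   # same pipeline split into two steps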
+ """ compositor = param.Selector(objects=['add', 'over', 'saturate', 'source'], @@ -1680,11 +1681,11 @@ def _process(self, overlay, key=None): class SpreadingOperation(LinkableOperation): - """ - Spreading expands each pixel in an Image based Element a certain + """Spreading expands each pixel in an Image based Element a certain number of pixels on all sides according to a given shape, merging pixels using a specified compositing operator. This can be useful to make sparse plots more visible. + """ how = param.Selector(default='source' if DATASHADER_VERSION <= (0, 11, 1) else None, @@ -1784,15 +1785,15 @@ def _process(self, element, key=None): class spread(SpreadingOperation): - """ - Spreading expands each pixel in an Image based Element a certain + """Spreading expands each pixel in an Image based Element a certain number of pixels on all sides according to a given shape, merging pixels using a specified compositing operator. This can be useful to make sparse plots more visible. See the datashader documentation for more detail: - http://datashader.org/api.html#datashader.transfer_functions.spread + https://datashader.org/api.html#datashader.transfer_functions.spread + """ px = param.Integer(default=1, doc=""" @@ -1803,8 +1804,7 @@ def _apply_spreading(self, array, how=None): class dynspread(SpreadingOperation): - """ - Spreading expands each pixel in an Image based Element a certain + """Spreading expands each pixel in an Image based Element a certain number of pixels on all sides according to a given shape, merging pixels using a specified compositing operator. This can be useful to make sparse plots more visible. Dynamic spreading determines @@ -1812,7 +1812,8 @@ class dynspread(SpreadingOperation): See the datashader documentation for more detail: - http://datashader.org/api.html#datashader.transfer_functions.dynspread + https://datashader.org/api.html#datashader.transfer_functions.dynspread + """ max_px = param.Integer(default=3, doc=""" @@ -1833,9 +1834,9 @@ def _apply_spreading(self, array, how=None): def split_dataframe(path_df): - """ - Splits a dataframe of paths separated by NaNs into individual + """Splits a dataframe of paths separated by NaNs into individual dataframes. + """ splits = np.where(path_df.iloc[:, 0].isnull())[0]+1 return [df for df in np.split(path_df, splits) if len(df) > 1] @@ -1864,11 +1865,11 @@ def _process(self, element, key=None): class bundle_graph(_connect_edges, hammer_bundle): - """ - Iteratively group edges and return as paths suitable for datashading. + """Iteratively group edges and return as paths suitable for datashading. Breaks each edge into a path with multiple line segments, and iteratively curves this path to bundle edges into groups. + """ def _bundle(self, position_df, edges_df): @@ -1877,8 +1878,8 @@ def _bundle(self, position_df, edges_df): class directly_connect_edges(_connect_edges, connect_edges): - """ - Given a Graph object will directly connect all nodes. + """Given a Graph object will directly connect all nodes. + """ def _bundle(self, position_df, edges_df): @@ -1889,11 +1890,11 @@ def identity(x): return x class inspect_mask(Operation): - """ - Operation used to display the inspection mask, for use with other + """Operation used to display the inspection mask, for use with other inspection operations. Can be used directly but is more commonly constructed using the mask property of the corresponding inspector operation. 
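spread and dynspread post-process shaded output to make sparse pixels more visible; a minimal sketch:

    import numpy as np
    import holoviews as hv
    from holoviews.operation.datashader import datashade, dynspread, spread

    points = hv.Points(np.random.randn(10_000, 2))
    rgb = datashade(points, dynamic=False)
    fixed = spread(rgb, px=2)                            # grow every pixel by a fixed amount
    adaptive = dynspread(rgb, max_px=5, threshold=0.5)   # grow until the density threshold is reached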
+ """ pixels = param.ClassSelector(default=3, class_=(int, tuple), doc=""" @@ -1932,9 +1933,9 @@ def _indicator(self, kdims, x, y, xdelta, ydelta): class inspect(Operation): - """ - Generalized inspect operation that detects the appropriate indicator + """Generalized inspect operation that detects the appropriate indicator type. + """ pixels = param.ClassSelector(default=3, class_=(int, tuple), doc=""" @@ -2020,9 +2021,9 @@ def _get_input_type(self, operations): class inspect_base(inspect): - """ - Given datashaded aggregate (Image) output, return a set of + """Given datashaded aggregate (Image) output, return a set of (hoverable) points sampled from those near the cursor. + """ def _process(self, raster, key=None): @@ -2072,9 +2073,9 @@ def _empty_df(cls, dataset): @classmethod def _mask_dataframe(cls, raster, x, y, xdelta, ydelta): - """ - Mask the dataframe around the specified x and y position with + """Mask the dataframe around the specified x and y position with the given x and y deltas + """ ds = raster.dataset x0, x1, y0, y1 = x-xdelta, x+xdelta, y-ydelta, y+ydelta @@ -2108,9 +2109,9 @@ def _element(cls, raster, df): @classmethod def _sort_by_distance(cls, raster, df, x, y): - """ - Returns a dataframe of hits within a given mask around a given + """Returns a dataframe of hits within a given mask around a given spatial location, sorted by distance from that location. + """ ds = raster.dataset.clone(df) xs, ys = (ds.dimension_values(kd) for kd in raster.kdims) @@ -2149,9 +2150,9 @@ def _element(cls, raster, df): @classmethod def _sort_by_distance(cls, raster, df, x, y): - """ - Returns a dataframe of hits within a given mask around a given + """Returns a dataframe of hits within a given mask around a given spatial location, sorted by distance from that location. + """ xs, ys = [], [] for geom in df.geometry.array: diff --git a/holoviews/operation/downsample.py b/holoviews/operation/downsample.py index 77259b5812..c86ab4a763 100644 --- a/holoviews/operation/downsample.py +++ b/holoviews/operation/downsample.py @@ -1,5 +1,4 @@ -""" -Implements downsampling algorithms for large 1D datasets. +"""Implements downsampling algorithms for large 1D datasets. The algorithms implemented in this module have been adapted from https://github.com/predict-idlab/plotly-resampler and are reproduced @@ -26,6 +25,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + """ import math @@ -56,6 +56,7 @@ def _argmax_area(prev_x, prev_y, avg_next_x, avg_next_y, x_bucket, y_bucket): All x values in the bucket y_bucket : np.ndarray All y values in the bucket + Returns ------- int @@ -101,24 +102,29 @@ def _lttb_inner(x, y, n_out, sampled_x, offset): def _ensure_contiguous(x, y): - """ - Ensures the arrays are contiguous in memory (required by tsdownsample). + """Ensures the arrays are contiguous in memory (required by tsdownsample). + """ return np.ascontiguousarray(x), np.ascontiguousarray(y) def _lttb(x, y, n_out, **kwargs): - """ - Downsample the data using the LTTB algorithm. + """Downsample the data using the LTTB algorithm. Will use a Python/Numpy implementation if tsdownsample is not available. - Args: - x (np.ndarray): The x-values of the data. - y (np.ndarray): The y-values of the data. - n_out (int): The number of output points. - Returns: - np.array: The indexes of the selected datapoints. + Parameters + ---------- + x : np.ndarray + The x-values of the data. 
+ y : np.ndarray + The y-values of the data. + n_out : int + The number of output points. + + Returns + ------- + np.array: The indexes of the selected datapoints. """ try: from tsdownsample import LTTBDownsampler @@ -152,15 +158,20 @@ def _lttb(x, y, n_out, **kwargs): return sampled_x def _nth_point(x, y, n_out, **kwargs): - """ - Downsampling by selecting every n-th datapoint - - Args: - x (np.ndarray): The x-values of the data. - y (np.ndarray): The y-values of the data. - n_out (int): The number of output points. - Returns: - slice: The slice of selected datapoints. + """Downsampling by selecting every n-th datapoint + + Parameters + ---------- + x : np.ndarray + The x-values of the data. + y : np.ndarray + The y-values of the data. + n_out : int + The number of output points. + + Returns + ------- + slice : The slice of selected datapoints. """ n_samples = len(x) return slice(0, n_samples, max(1, math.ceil(n_samples / n_out))) @@ -213,11 +224,11 @@ def _m4(x, y, n_out, **kwargs): } class downsample1d(ResampleOperation1D): - """ - Implements downsampling of a regularly sampled 1D dataset. + """Implements downsampling of a regularly sampled 1D dataset. If available uses the `tsdownsample` library to perform massively accelerated downsampling. + """ algorithm = param.Selector(default='lttb', objects=list(_ALGORITHMS), doc=""" @@ -282,8 +293,8 @@ def _process(self, element, key=None, shared_data=None): return element.iloc[samples] def _compute_mask(self, element): - """ - Computes the mask to apply to the element before downsampling. + """Computes the mask to apply to the element before downsampling. + """ neighbor_enabled = ( self.p.neighbor_points diff --git a/holoviews/operation/element.py b/holoviews/operation/element.py index fb157612c0..90f2c5981b 100644 --- a/holoviews/operation/element.py +++ b/holoviews/operation/element.py @@ -1,6 +1,6 @@ -""" -Collection of either extremely generic or simple Operation +"""Collection of either extremely generic or simple Operation examples. + """ import warnings from functools import partial @@ -47,8 +47,7 @@ def identity(x,k): return x class operation(Operation): - """ - The most generic operation that wraps any callable into an + """The most generic operation that wraps any callable into an Operation. The callable needs to accept an HoloViews component and a key (that may be ignored) and must return a new HoloViews component. @@ -61,6 +60,7 @@ class operation(Operation): Could be used to implement a collapse operation to subtracts the data between Rasters in an Overlay. + """ output_type = param.Parameter(default=None, doc=""" @@ -89,10 +89,10 @@ def _process(self, view, key=None): class factory(Operation): - """ - Simple operation that constructs any element that accepts some + """Simple operation that constructs any element that accepts some other element as input. For instance, RGB and HSV elements can be created from overlays of Image elements. + """ output_type = param.Parameter(default=RGB, doc=""" @@ -133,8 +133,8 @@ def _process(self, element, key=None): class method(Operation): - """ - Operation that wraps a method call + """Operation that wraps a method call + """ output_type = param.ClassSelector(class_=type, doc=""" @@ -158,8 +158,7 @@ def _process(self, element, key=None): class apply_when(param.ParameterizedFunction): - """ - Applies a selection depending on the current zoom range. If the + """Applies a selection depending on the current zoom range. 
If the supplied predicate function returns a True it will apply the operation otherwise it will return the raw element after the selection. For example the following will apply datashading if @@ -167,6 +166,7 @@ class apply_when(param.ParameterizedFunction): just returning the selected points element: apply_when(points, operation=datashade, predicate=lambda x: x > 1000) + """ operation = param.Callable(default=lambda x: x) @@ -203,8 +203,7 @@ def __call__(self, obj, **params): class chain(Operation): - """ - Defining an Operation chain is an easy way to define a new + """Defining an Operation chain is an easy way to define a new Operation from a series of existing ones. The argument is a list of Operation (or Operation instances) that are called in sequence to generate the returned element. @@ -218,6 +217,7 @@ class chain(Operation): Instances are only required when arguments need to be passed to individual operations so the resulting object is a function over a single argument. + """ output_type = param.Parameter(default=Image, doc=""" @@ -245,9 +245,9 @@ def _process(self, view, key=None): return processed.clone(group=self.p.group) def find(self, operation, skip_nonlinked=True): - """ - Returns the first found occurrence of an operation while + """Returns the first found occurrence of an operation while performing a backward traversal of the chain pipeline. + """ found = None for op in self.operations[::-1]: @@ -260,8 +260,7 @@ def find(self, operation, skip_nonlinked=True): class transform(Operation): - """ - Generic Operation to transform an input Image or RGBA + """Generic Operation to transform an input Image or RGBA element into an output Image. The transformation is defined by the supplied callable that accepts the data of the input Image (typically a numpy array) and returns the transformed data of the @@ -276,6 +275,7 @@ class transform(Operation): autocorrelation using the scipy library with: operator=lambda x: scipy.signal.correlate2d(x, x) + """ output_type = Image @@ -296,8 +296,7 @@ def _process(self, img, key=None): class image_overlay(Operation): - """ - Operation to build a overlay of images to a specification from a + """Operation to build a overlay of images to a specification from a subset of the required elements. This is useful for reordering the elements of an overlay, @@ -313,6 +312,7 @@ class image_overlay(Operation): strongest match will be used. In the case of a tie in match strength, the first layer in the input is used. One successful match is always required. + """ output_type = Overlay @@ -341,7 +341,9 @@ class image_overlay(Operation): @classmethod def _match(cls, el, spec): - "Return the strength of the match (None if no match)" + """Return the strength of the match (None if no match) + + """ spec_dict = dict(zip(['type', 'group', 'label'], spec.split('.'))) if not isinstance(el, Image) or spec_dict['type'] != 'Image': raise NotImplementedError("Only Image currently supported") @@ -357,10 +359,10 @@ def _match(cls, el, spec): def _match_overlay(self, raster, overlay_spec): - """ - Given a raster or input overlay, generate a list of matched + """Given a raster or input overlay, generate a list of matched elements (None if no match) and corresponding tuple of match strength values. 
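A hedged sketch (not part of the patch) of the generic transform operation documented above: the supplied operator receives the Image's data array and must return a transformed array; the lambda here is just an example operator.

import numpy as np
import holoviews as hv
from holoviews.operation.element import transform

hv.extension('bokeh')

img = hv.Image(np.random.rand(50, 50))

# Any array-in/array-out callable works as the operator,
# e.g. a scipy correlation as mentioned in the docstring.
flipped = transform(img, operator=lambda arr: 1 - arr)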
+ """ ordering = [None]*len(overlay_spec) # Elements to overlay strengths = [0]*len(overlay_spec) # Match strengths @@ -399,11 +401,12 @@ def _process(self, raster, key=None): class threshold(Operation): - """ - Threshold a given Image whereby all values higher than a given + """Threshold a given Image whereby all values higher than a given level map to the specified high value and all values lower than that level map to the specified low value. + """ + output_type = Image level = param.Number(default=0.5, doc=""" @@ -438,11 +441,11 @@ def _process(self, matrix, key=None): class gradient(Operation): - """ - Compute the gradient plot of the supplied Image. + """Compute the gradient plot of the supplied Image. If the Image value dimension is cyclic, the smallest step is taken considered the cyclic range + """ output_type = Image @@ -490,10 +493,10 @@ def _process(self, matrix, key=None): class convolve(Operation): - """ - Apply a convolution to an overlay using the top layer as the + """Apply a convolution to an overlay using the top layer as the kernel for convolving the bottom layer. Both Image elements in the input overlay should have a single value dimension. + """ output_type = Image @@ -536,12 +539,12 @@ def _process(self, overlay, key=None): class contours(Operation): - """ - Given a Image with a single channel, annotate it with contour + """Given a Image with a single channel, annotate it with contour lines for a given set of contour levels. The return is an NdOverlay with a Contours layer for each given level, overlaid on top of the input Image. + """ output_type = Overlay @@ -728,10 +731,10 @@ def points_to_datetime(points): class histogram(Operation): - """ - Returns a Histogram of the input element data, binned into + """Returns a Histogram of the input element data, binned into num_bins over the bin_range (if specified) along the specified dimension. + """ bin_range = param.NumericTuple(default=None, length=2, doc=""" @@ -957,11 +960,11 @@ def _process(self, element, key=None, groupby=False): class decimate(Operation): - """ - Decimates any column based Element to a specified number of random + """Decimates any column based Element to a specified number of random rows if the current element defined by the x_range and y_range contains more than max_samples. By default the operation returns a DynamicMap with a RangeXY stream allowing dynamic downsampling. + """ dynamic = param.Boolean(default=True, doc=""" @@ -1018,9 +1021,9 @@ def _process(self, element, key=None): class interpolate_curve(Operation): - """ - Resamples a Curve using the defined interpolation method, e.g. + """Resamples a Curve using the defined interpolation method, e.g. to represent changes in y-values as steps. + """ interpolation = param.Selector(objects=['steps-pre', 'steps-mid', @@ -1105,13 +1108,13 @@ def _process(self, element, key=None): class collapse(Operation): - """ - Given an overlay of Element types, collapse into single Element + """Given an overlay of Element types, collapse into single Element object using supplied function. Collapsing aggregates over the key dimensions of each object applying the supplied fn to each group. This is an example of an Operation that does not involve any Raster types. 
+ """ fn = param.Callable(default=np.mean, doc=""" @@ -1127,13 +1130,13 @@ def _process(self, overlay, key=None): class gridmatrix(param.ParameterizedFunction): - """ - The gridmatrix operation takes an Element or HoloMap + """The gridmatrix operation takes an Element or HoloMap of Elements as input and creates a GridMatrix object, which plots each dimension in the Element against each other dimension. This provides a very useful overview of high-dimensional data and is inspired by pandas and seaborn scatter_matrix implementations. + """ chart_type = param.Parameter(default=Scatter, doc=""" diff --git a/holoviews/operation/normalization.py b/holoviews/operation/normalization.py index e685ad6d69..bc52437165 100644 --- a/holoviews/operation/normalization.py +++ b/holoviews/operation/normalization.py @@ -1,5 +1,4 @@ -""" -Data normalization operations. +"""Data normalization operations. Normalizing input data into a valid range is a common operation and often required before further processing. The semantics of @@ -11,6 +10,7 @@ operations per element type. Unlike display normalization, data normalizations result in transformations to the stored data within each element. + """ from collections import defaultdict @@ -24,14 +24,14 @@ class Normalization(Operation): - """ - Base class for all normalization operation. + """Base class for all normalization operation. This class standardizes how normalization is specified using the ranges and keys parameter. The ranges parameter is designed to be very flexible, allowing a concise description for simple normalization while allowing complex key- and element- specific normalization to also be specified. + """ data_range = param.Boolean(default=False, doc=""" @@ -98,9 +98,9 @@ def process_element(self, element, key, ranges=None, keys=None, **params): def get_ranges(self, element, key): - """ - Method to get the appropriate normalization range dictionary + """Method to get the appropriate normalization range dictionary given a key and element. + """ keys = self.p['keys'] ranges = self.p['ranges'] @@ -132,8 +132,7 @@ def _process(self, view, key=None): class raster_normalization(Normalization): - """ - Normalizes elements of type Raster. + """Normalizes elements of type Raster. For Raster elements containing (NxM) data, this will normalize the array/matrix into the specified range if value_dimension matches @@ -143,6 +142,7 @@ class raster_normalization(Normalization): third dimensional are normalized independently if the corresponding value dimensions are selected by the ranges dictionary. + """ def _process(self, raster, key=None): @@ -180,13 +180,13 @@ def _normalize_raster(self, raster, key): class subcoordinate_group_ranges(Operation): - """ - Compute the data range group-wise in a subcoordinate_y overlay, + """Compute the data range group-wise in a subcoordinate_y overlay, and set the dimension range of each Chart element based on the value computed for its group. This operation is useful to visually apply a group-wise min-max normalisation. + """ def _process(self, overlay, key=None): diff --git a/holoviews/operation/resample.py b/holoviews/operation/resample.py index bb482a79cb..4745386f59 100644 --- a/holoviews/operation/resample.py +++ b/holoviews/operation/resample.py @@ -9,8 +9,8 @@ class LinkableOperation(Operation): - """ - Abstract baseclass for operations supporting linked inputs. + """Abstract baseclass for operations supporting linked inputs. 
+ """ link_inputs = param.Boolean(default=True, doc=""" @@ -25,8 +25,8 @@ class LinkableOperation(Operation): class ResampleOperation1D(LinkableOperation): - """ - Abstract baseclass for resampling operations + """Abstract baseclass for resampling operations + """ dynamic = param.Boolean(default=True, doc=""" @@ -61,8 +61,8 @@ class ResampleOperation1D(LinkableOperation): class ResampleOperation2D(ResampleOperation1D): - """ - Abstract baseclass for resampling operations + """Abstract baseclass for resampling operations + """ dynamic = param.Boolean(default=True, doc=""" diff --git a/holoviews/operation/stats.py b/holoviews/operation/stats.py index d92e9f0456..3b7a2a55cc 100644 --- a/holoviews/operation/stats.py +++ b/holoviews/operation/stats.py @@ -9,7 +9,9 @@ def _kde_support(bin_range, bw, gridsize, cut, clip): - """Establish support for a kernel density estimate.""" + """Establish support for a kernel density estimate. + + """ kmin, kmax = bin_range[0] - bw * cut, bin_range[1] + bw * cut if isfinite(clip[0]): kmin = max(kmin, clip[0]) @@ -19,8 +21,7 @@ def _kde_support(bin_range, bw, gridsize, cut, clip): class univariate_kde(Operation): - """ - Computes a 1D kernel density estimate (KDE) along the supplied + """Computes a 1D kernel density estimate (KDE) along the supplied dimension. Kernel density estimation is a non-parametric way to estimate the probability density function of a random variable. @@ -28,6 +29,7 @@ class univariate_kde(Operation): the supplied bandwidth. These kernels are then summed to produce the density estimate. By default a good bandwidth is determined using the bw_method but it may be overridden by an explicit value. + """ bw_method = param.Selector(default='scott', objects=['scott', 'silverman'], doc=""" @@ -122,8 +124,7 @@ def _process(self, element, key=None): class bivariate_kde(Operation): - """ - Computes a 2D kernel density estimate (KDE) of the first two + """Computes a 2D kernel density estimate (KDE) of the first two dimensions in the input data. Kernel density estimation is a non-parametric way to estimate the probability density function of a random variable. @@ -132,6 +133,7 @@ class bivariate_kde(Operation): the supplied bandwidth. These kernels are then summed to produce the density estimate. By default a good bandwidth is determined using the bw_method but it may be overridden by an explicit value. + """ contours = param.Boolean(default=True, doc=""" diff --git a/holoviews/operation/timeseries.py b/holoviews/operation/timeseries.py index f0cb099c9a..f541fc4862 100644 --- a/holoviews/operation/timeseries.py +++ b/holoviews/operation/timeseries.py @@ -9,8 +9,8 @@ class RollingBase(param.Parameterized): - """ - Parameters shared between `rolling` and `rolling_outlier_std`. + """Parameters shared between `rolling` and `rolling_outlier_std`. + """ center = param.Boolean(default=True, doc=""" @@ -31,8 +31,8 @@ def _roll_kwargs(self): class rolling(Operation,RollingBase): - """ - Applies a function over a rolling window. + """Applies a function over a rolling window. + """ window_type = param.Selector(default=None, allow_None=True, @@ -65,8 +65,8 @@ def _process(self, element, key=None): class resample(Operation): - """ - Resamples a timeseries of dates with a frequency and function. + """Resamples a timeseries of dates with a frequency and function. 
+ """ closed = param.Selector(default=None, objects=['left', 'right'], @@ -95,8 +95,7 @@ def _process(self, element, key=None): class rolling_outlier_std(Operation, RollingBase): - """ - Detect outliers using the standard deviation within a rolling window. + """Detect outliers using the standard deviation within a rolling window. Outliers are the array elements outside `sigma` standard deviations from the smoothed trend line, as calculated from the trend line residuals. @@ -104,6 +103,7 @@ class rolling_outlier_std(Operation, RollingBase): The rolling window is controlled by parameters shared with the `rolling` operation via the base class RollingBase, to make it simpler to use the same settings for both. + """ sigma = param.Number(default=2.0, doc=""" diff --git a/holoviews/plotting/__init__.py b/holoviews/plotting/__init__.py index ec1c68d591..ac4e40a41f 100644 --- a/holoviews/plotting/__init__.py +++ b/holoviews/plotting/__init__.py @@ -1,9 +1,9 @@ -""" -HoloViews plotting sub-system the defines the interface to be used by +"""HoloViews plotting sub-system that defines the interface to be used by any third-party plotting/rendering package. This file defines the HTML tags used to wrap rendered output for display in the IPython Notebook (optional). + """ from ..core.options import Compositor, Cycle from ..element import RGB, Area, Image, ImageStack, Polygons, QuadMesh, Raster diff --git a/holoviews/plotting/bokeh/annotation.py b/holoviews/plotting/bokeh/annotation.py index bb5093b37b..3d78fcff5e 100644 --- a/holoviews/plotting/bokeh/annotation.py +++ b/holoviews/plotting/bokeh/annotation.py @@ -246,8 +246,8 @@ def get_data(self, element, ranges, style): return (data, mapping, style) def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ box = Span(level=properties.get('level', 'glyph'), **mapping) plot.renderers.append(box) @@ -296,8 +296,8 @@ def _update_glyph(self, renderer, properties, mapping, glyph, source, data): return super()._update_glyph(renderer, properties, mapping, glyph, source, data) def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ box = BoxAnnotation(level=properties.get('level', 'glyph'), **mapping) plot.renderers.append(box) @@ -325,8 +325,8 @@ def get_data(self, element, ranges, style): return (data, mapping, style) def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ slope = Slope(level=properties.get('level', 'glyph'), **mapping) plot.add_layout(slope) @@ -338,9 +338,9 @@ def get_extents(self, element, ranges=None, range_type='combined', **kwargs): class SplinePlot(ElementPlot, AnnotationPlot): - """ - Draw the supplied Spline annotation (see Spline docstring). + """Draw the supplied Spline annotation (see Spline docstring). Does not support matplotlib Path codes. + """ style_opts = [*line_properties, 'visible'] @@ -426,8 +426,8 @@ def get_data(self, element, ranges, style): def _init_glyph(self, plot, mapping, properties, key): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ properties = {k: v for k, v in properties.items() if 'legend' not in k} @@ -526,8 +526,8 @@ def get_data(self, element, ranges, style): return element.data, {}, style def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): - """ - Initializes a new plot object with the last available frame. 
+ """Initializes a new plot object with the last available frame. + """ # Get element key and ranges for frame element = self.hmap.last @@ -544,9 +544,9 @@ def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): return div def update_frame(self, key, ranges=None, plot=None): - """ - Updates an existing plot with data corresponding + """Updates an existing plot with data corresponding to the key. + """ element = self._get_frame(key) text, _, _ = self.get_data(element, ranges, {}) diff --git a/holoviews/plotting/bokeh/callbacks.py b/holoviews/plotting/bokeh/callbacks.py index 410aafc587..c3d596d26b 100644 --- a/holoviews/plotting/bokeh/callbacks.py +++ b/holoviews/plotting/bokeh/callbacks.py @@ -87,8 +87,7 @@ class Callback: - """ - Provides a baseclass to define callbacks, which return data from + """Provides a baseclass to define callbacks, which return data from bokeh model callbacks, events and attribute changes. The callback then makes this data available to any streams attached to it. @@ -137,6 +136,7 @@ class Callback: of time between events. - debounce: Processes the message only when no new event has been received within the `throttle_timeout` duration. + """ # Attributes to sync @@ -180,9 +180,9 @@ def _transform(self, msg): return msg def _process_msg(self, msg): - """ - Subclassable method to preprocess JSON message in callback + """Subclassable method to preprocess JSON message in callback before passing to stream. + """ return self._transform(msg) @@ -208,10 +208,10 @@ def reset(self): self._queue = [] def _filter_msg(self, msg, ids): - """ - Filter event values that do not originate from the plotting + """Filter event values that do not originate from the plotting handles associated with a particular stream using their ids to match them. + """ filtered_msg = {} for k, v in msg.items(): @@ -252,9 +252,9 @@ async def on_msg(self, msg): stream._metadata = {} def _init_plot_handles(self): - """ - Find all requested plotting handles and cache them along + """Find all requested plotting handles and cache them along with the IDs of the models the callbacks will be attached to. + """ plots = [self.plot] if self.plot.subplots: @@ -274,10 +274,10 @@ def _init_plot_handles(self): return requested def _get_stream_handle_ids(self, handles): - """ - Gather the ids of the plotting handles attached to this callback + """Gather the ids of the plotting handles attached to this callback This allows checking that a stream is not given the state of a plotting handle it wasn't attached to + """ stream_handle_ids = defaultdict(dict) for stream in self.streams: @@ -289,11 +289,11 @@ def _get_stream_handle_ids(self, handles): @classmethod def resolve_attr_spec(cls, spec, cb_obj, model=None): - """ - Resolves a Callback attribute specification looking the + """Resolves a Callback attribute specification looking the corresponding attribute up on the cb_obj, which should be a bokeh model. If not model is supplied cb_obj is assumed to be the same as the model. + """ if not cb_obj: raise AttributeError(f'Bokeh plot attribute {spec} could not be found') @@ -317,8 +317,8 @@ def skip_change(self, msg): return any(skip(msg) for skip in self.skip_changes) def _set_busy(self, busy): - """ - Sets panel.state to busy if available. + """Sets panel.state to busy if available. 
+ """ if 'busy' not in state.param: return # Check if busy state is supported @@ -328,9 +328,9 @@ def _set_busy(self, busy): state.busy = busy async def on_change(self, attr, old, new): - """ - Process change events adding timeout to process multiple concerted + """Process change events adding timeout to process multiple concerted value change at once rather than firing off multiple plot updates. + """ self._queue.append((attr, old, new, time.time())) if not self._active and self.plot.document: @@ -339,9 +339,9 @@ async def on_change(self, attr, old, new): await self.process_on_change() async def on_event(self, event): - """ - Process bokeh UIEvents adding timeout to process multiple concerted + """Process bokeh UIEvents adding timeout to process multiple concerted value change at once rather than firing off multiple plot updates. + """ self._queue.append((event, time.time())) if not self._active and self.plot.document: @@ -350,8 +350,8 @@ async def on_event(self, event): await self.process_on_event() async def process_on_event(self, timeout=None): - """ - Trigger callback change event and triggering corresponding streams. + """Trigger callback change event and triggering corresponding streams. + """ await asyncio.sleep(0.01) if not self._queue: @@ -430,8 +430,8 @@ def _schedule_change(self, attr, old, new): self.plot.document.add_next_tick_callback(partial(self.on_change, attr, old, new)) def set_callback(self, handle): - """ - Set up on_change events for bokeh server interactions. + """Set up on_change events for bokeh server interactions. + """ if self.on_events: for event in self.on_events: @@ -476,8 +476,8 @@ def initialize(self, plot_id=None): class PointerXYCallback(Callback): - """ - Returns the mouse x/y-position on mousemove event. + """Returns the mouse x/y-position on mousemove event. + """ attributes = {'x': 'cb_obj.x', 'y': 'cb_obj.y'} @@ -485,7 +485,9 @@ class PointerXYCallback(Callback): on_events = ['mousemove'] def _process_out_of_bounds(self, value, start, end): - "Clips out of bounds values" + """Clips out of bounds values + + """ if isinstance(value, np.datetime64): v = dt64_to_dt(value) if isinstance(start, (int, float)): @@ -548,16 +550,16 @@ def _process_msg(self, msg): class PointerXCallback(PointerXYCallback): - """ - Returns the mouse x-position on mousemove event. + """Returns the mouse x-position on mousemove event. + """ attributes = {'x': 'cb_obj.x'} class PointerYCallback(PointerXYCallback): - """ - Returns the mouse x/y-position on mousemove event. + """Returns the mouse x/y-position on mousemove event. + """ attributes = {'y': 'cb_obj.y'} @@ -804,11 +806,8 @@ async def _process_selection_partial_event(self): class TapCallback(PopupMixin, PointerXYCallback): - """ - Returns the mouse x/y-position on tap event. + """Returns the mouse x/y-position on tap event. - Note: As of bokeh 0.12.5, there is no way to distinguish the - individual tap events within a doubletap event. """ geom_type = 'point' @@ -816,7 +815,9 @@ class TapCallback(PopupMixin, PointerXYCallback): on_events = ['tap', 'doubletap'] def _process_out_of_bounds(self, value, start, end): - "Sets out of bounds values to None" + """Sets out of bounds values to None + + """ if isinstance(value, np.datetime64): v = dt64_to_dt(value) if isinstance(start, (int, float)): @@ -837,8 +838,8 @@ def _process_out_of_bounds(self, value, start, end): class MultiAxisTapCallback(TapCallback): - """ - Returns the mouse x/y-positions on tap event. + """Returns the mouse x/y-positions on tap event. 
+ """ attributes = {'x': 'cb_obj.x', 'y': 'cb_obj.y'} @@ -886,58 +887,58 @@ def _process_msg(self, msg): class SingleTapCallback(TapCallback): - """ - Returns the mouse x/y-position on tap event. + """Returns the mouse x/y-position on tap event. + """ on_events = ['tap'] class PressUpCallback(TapCallback): - """ - Returns the mouse x/y-position of a pressup mouse event. + """Returns the mouse x/y-position of a pressup mouse event. + """ on_events = ['pressup'] class PanEndCallback(TapCallback): - """ - Returns the mouse x/y-position of a pan end event. + """Returns the mouse x/y-position of a pan end event. + """ on_events = ['panend'] class DoubleTapCallback(TapCallback): - """ - Returns the mouse x/y-position on doubletap event. + """Returns the mouse x/y-position on doubletap event. + """ on_events = ['doubletap'] class MouseEnterCallback(PointerXYCallback): - """ - Returns the mouse x/y-position on mouseenter event, i.e. when + """Returns the mouse x/y-position on mouseenter event, i.e. when mouse enters the plot canvas. + """ on_events = ['mouseenter'] class MouseLeaveCallback(PointerXYCallback): - """ - Returns the mouse x/y-position on mouseleave event, i.e. when + """Returns the mouse x/y-position on mouseleave event, i.e. when mouse leaves the plot canvas. + """ on_events = ['mouseleave'] class RangeXYCallback(Callback): - """ - Returns the x/y-axis ranges of a plot. + """Returns the x/y-axis ranges of a plot. + """ on_events = ['rangesupdate'] @@ -991,8 +992,8 @@ def _process_msg(self, msg): class RangeXCallback(RangeXYCallback): - """ - Returns the x-axis range of a plot. + """Returns the x-axis range of a plot. + """ on_events = ['rangesupdate'] @@ -1006,8 +1007,8 @@ class RangeXCallback(RangeXYCallback): class RangeYCallback(RangeXYCallback): - """ - Returns the y-axis range of a plot. + """Returns the y-axis range of a plot. + """ on_events = ['rangesupdate'] @@ -1021,9 +1022,9 @@ class RangeYCallback(RangeXYCallback): class PlotSizeCallback(Callback): - """ - Returns the actual width and height of a plot once the layout + """Returns the actual width and height of a plot once the layout solver has executed. + """ models = ['plot'] @@ -1059,9 +1060,10 @@ def _process_msg(self, msg): class BoundsCallback(PopupMixin, Callback): + """Returns the bounds of a box_select tool. + """ - Returns the bounds of a box_select tool. - """ + attributes = {'x0': 'cb_obj.geometry.x0', 'x1': 'cb_obj.geometry.x1', 'y0': 'cb_obj.geometry.y0', @@ -1088,9 +1090,9 @@ def _process_msg(self, msg): class SelectionXYCallback(BoundsCallback): - """ - Converts a bounds selection to numeric or categorical x-range + """Converts a bounds selection to numeric or categorical x-range and y-range selections. + """ def _process_msg(self, msg): @@ -1133,8 +1135,8 @@ def _process_msg(self, msg): class BoundsXCallback(Callback): - """ - Returns the bounds of a xbox_select tool. + """Returns the bounds of a xbox_select tool. + """ attributes = {'x0': 'cb_obj.geometry.x0', 'x1': 'cb_obj.geometry.x1'} @@ -1156,8 +1158,8 @@ def _process_msg(self, msg): class BoundsYCallback(Callback): - """ - Returns the bounds of a ybox_select tool. + """Returns the bounds of a ybox_select tool. + """ attributes = {'y0': 'cb_obj.geometry.y0', 'y1': 'cb_obj.geometry.y1'} @@ -1204,8 +1206,8 @@ def _process_msg(self, msg): class Selection1DCallback(PopupMixin, Callback): - """ - Returns the current selection on a ColumnDataSource. + """Returns the current selection on a ColumnDataSource. 
+ """ attributes = {'index': 'cb_obj.indices'} @@ -1345,8 +1347,8 @@ def _process_msg(self, msg): class ResetCallback(Callback): - """ - Signals the Reset stream if an event has been triggered. + """Signals the Reset stream if an event has been triggered. + """ models = ['plot'] @@ -1358,9 +1360,9 @@ def _process_msg(self, msg): class CDSCallback(Callback): - """ - A Stream callback that syncs the data on a bokeh ColumnDataSource + """A Stream callback that syncs the data on a bokeh ColumnDataSource model with Python. + """ attributes = {'data': 'source.data'} @@ -1453,9 +1455,9 @@ def _create_style_callback(self, cds, glyph): cds.js_on_change('data', cb) def _update_cds_vdims(self, data): - """ - Add any value dimensions not already in the data ensuring the + """Add any value dimensions not already in the data ensuring the element can be reconstituted in entirety. + """ element = self.plot.current_frame stream = self.streams[0] @@ -1531,9 +1533,9 @@ def _process_msg(self, msg): return super()._process_msg(msg) def _update_cds_vdims(self, data): - """ - Add any value dimensions not already in the data ensuring the + """Add any value dimensions not already in the data ensuring the element can be reconstituted in entirety. + """ element = self.plot.current_frame for d in element.vdims: @@ -1576,9 +1578,9 @@ def _process_msg(self, msg): return super()._process_msg(msg) def _update_cds_vdims(self, data): - """ - Add any value dimensions not already in the data ensuring the + """Add any value dimensions not already in the data ensuring the element can be reconstituted in entirety. + """ element = self.plot.current_frame stream = self.streams[0] diff --git a/holoviews/plotting/bokeh/chart.py b/holoviews/plotting/bokeh/chart.py index 0ede6f56e1..b3e9ccca29 100644 --- a/holoviews/plotting/bokeh/chart.py +++ b/holoviews/plotting/bokeh/chart.py @@ -509,8 +509,8 @@ def get_data(self, element, ranges, style): def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ ret = super()._init_glyph(plot, mapping, properties) if "field" not in mapping.get("fill_color", {}): @@ -576,8 +576,8 @@ def get_data(self, element, ranges, style): def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ properties = {k: v for k, v in properties.items() if 'legend' not in k} for prop in ['color', 'alpha']: @@ -612,9 +612,9 @@ class SpreadPlot(ElementPlot): _stream_data = False # Plot does not support streaming data def _split_area(self, xs, lower, upper): - """ - Splits area plots at nans and returns x- and y-coordinates for + """Splits area plots at nans and returns x- and y-coordinates for each area separated by nans. + """ xnan = np.array([np.datetime64('nat') if xs.dtype.kind == 'M' else np.nan]) ynan = np.array([np.datetime64('nat') if lower.dtype.kind == 'M' else np.nan]) @@ -737,8 +737,8 @@ def get_data(self, element, ranges, style): class SideSpikesPlot(SpikesPlot): - """ - SpikesPlot with useful defaults for plotting adjoined rug plot. + """SpikesPlot with useful defaults for plotting adjoined rug plot. 
+ """ selected = param.List(default=None, doc=""" @@ -768,10 +768,10 @@ class SideSpikesPlot(SpikesPlot): class BarPlot(BarsMixin, ColorbarPlot, LegendPlot): - """ - BarPlot allows generating single- or multi-category + """BarPlot allows generating single- or multi-category bar Charts, by selecting which key dimensions are mapped onto separate groups, categories and stacks. + """ multi_level = param.Boolean(default=True, doc=""" @@ -828,10 +828,10 @@ def _get_factors(self, element, ranges): return ([], xvals) if self.invert_axes else (xvals, []) def get_stack(self, xvals, yvals, baselines, sign='positive'): - """ - Iterates over a x- and y-values in a stack layer + """Iterates over a x- and y-values in a stack layer and appropriately offsets the layer on top of the previous layer. + """ bottoms, tops = [], [] for x, y in zip(xvals, yvals): diff --git a/holoviews/plotting/bokeh/element.py b/holoviews/plotting/bokeh/element.py index eb8b0b66ef..83b46da299 100644 --- a/holoviews/plotting/bokeh/element.py +++ b/holoviews/plotting/bokeh/element.py @@ -507,8 +507,8 @@ def _prepare_hover_kwargs(self, element): return tooltips, hover_opts def _init_tools(self, element, callbacks=None): - """ - Processes the list of tools to be supplied to the plot. + """Processes the list of tools to be supplied to the plot. + """ if callbacks is None: callbacks = [] @@ -637,9 +637,9 @@ def _update_hover(self, element): tool.tooltips = new_hover[0].tooltips def _get_hover_data(self, data, element, dimensions=None): - """ - Initializes hover data based on Element dimension values. + """Initializes hover data based on Element dimension values. If empty initializes with no data. + """ if 'hover' not in self.handles or self.static_source: return @@ -655,10 +655,10 @@ def _get_hover_data(self, data, element, dimensions=None): data[dim] = [v] * len(next(iter(data.values()))) def _shared_axis_range(self, plots, specs, range_type, axis_type, pos): - """ - Given a list of other plots return the shared axis from another + """Given a list of other plots return the shared axis from another plot by matching the dimensions specs stored as tags on the dimensions. Returns None if there is no such axis. + """ dim_range = None categorical = range_type is FactorRange @@ -694,10 +694,10 @@ def _shared_axis_range(self, plots, specs, range_type, axis_type, pos): @property def _subcoord_overlaid(self): - """ - Indicates when the context is a subcoordinate plot, either from within + """Indicates when the context is a subcoordinate plot, either from within the overlay rendering or one of its subplots. Used to skip code paths when rendering an element outside of an overlay. + """ if self._subcoord_standalone_ is not None: return self._subcoord_standalone_ @@ -909,10 +909,10 @@ def _create_extra_axes(self, plots, subplots, element, ranges): return yaxes, ax_specs def _init_plot(self, key, element, plots, ranges=None): - """ - Initializes Bokeh figure to draw Element into and sets basic + """Initializes Bokeh figure to draw Element into and sets basic figure and axis attributes including axes types, labels, titles and plot height and width. + """ subplots = list(self.subplots.values()) if self.subplots else [] @@ -1038,8 +1038,8 @@ def _reset_follow(self, event): stream.trigger(self.streaming) def _plot_properties(self, key, element): - """ - Returns a dictionary of plot properties. + """Returns a dictionary of plot properties. + """ init = 'plot' not in self.handles size_multiplier = self.renderer.size/100. 
@@ -1078,7 +1078,9 @@ def _plot_properties(self, key, element): return plot_props def _set_active_tools(self, plot): - "Activates the list of active tools" + """Activates the list of active tools + + """ if plot is None or self.toolbar == "disable": return @@ -1138,9 +1140,9 @@ def _populate_axis_handles(self, plot): def _axis_properties(self, axis, key, plot, dimension=None, ax_mapping=None): - """ - Returns a dictionary of axis properties depending + """Returns a dictionary of axis properties depending on the specified axis. + """ # need to copy dictionary by calling dict() on it if ax_mapping is None: @@ -1229,8 +1231,8 @@ def _axis_properties(self, axis, key, plot, dimension=None, return axis_props def _update_plot(self, key, plot, element=None): - """ - Updates plot parameters on every frame + """Updates plot parameters on every frame + """ plot.update(**self._plot_properties(key, element)) if not self.multi_y: @@ -1517,11 +1519,16 @@ def _update_main_ranges(self, element, x_range, y_range, ranges, subcoord=False) def _get_tag(self, model, tag_name): """Get a tag from a Bokeh model - Args: - model (Model): Bokeh model - tag_name (str): Name of tag to get - Returns: - tag_value: Value of tag or False if not found + Parameters + ---------- + model : Model + Bokeh model + tag_name : str + Name of tag to get + + Returns + ------- + tag_value : Value of tag or False if not found """ for tag in model.tags: if isinstance(tag, dict) and tag_name in tag: @@ -1575,9 +1582,9 @@ def _update_range(self, axis_range, low, high, factors, invert, shared, log, str axis_range.trigger(k, old, new) def _setup_autorange(self): - """ - Sets up a callback which will iterate over available data + """Sets up a callback which will iterate over available data renderers and auto-range along one axis. + """ if not isinstance(self, OverlayPlot) and not self.apply_ranges: return @@ -1694,11 +1701,11 @@ def _setup_autorange(self): self._js_on_data_callbacks.append(callback) def _categorize_data(self, data, cols, dims): - """ - Transforms non-string or integer types in datasource if the + """Transforms non-string or integer types in datasource if the axis to be plotted on is categorical. Accepts the column data source data, the columns corresponding to the axes and the dimensions for each axis, changing the data inplace. + """ if self.invert_axes: cols = cols[::-1] @@ -1712,8 +1719,8 @@ def _categorize_data(self, data, cols, dims): def get_aspect(self, xspan, yspan): - """ - Computes the aspect ratio of the plot + """Computes the aspect ratio of the plot + """ if 'plot' in self.handles and self.state.frame_width and self.state.frame_height: return self.state.frame_width/self.state.frame_height @@ -1743,8 +1750,8 @@ def _get_dimension_factors(self, element, ranges, dimension): return [v if values.dtype.kind in 'SU' else dimension.pprint_value(v) for v in values] def _get_factors(self, element, ranges): - """ - Get factors for categorical axes. + """Get factors for categorical axes. + """ xdim, ydim = element.dimensions()[:2] xvals = self._get_dimension_factors(element, ranges, xdim) @@ -1754,8 +1761,8 @@ def _get_factors(self, element, ranges): return coords def _process_legend(self): - """ - Disables legends if show_legend is disabled. + """Disables legends if show_legend is disabled. + """ for l in self.handles['plot'].legend: l.items[:] = [] @@ -1763,8 +1770,8 @@ def _process_legend(self): l.background_fill_alpha = 0 def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. 
+ """Returns a Bokeh glyph object. + """ mapping['tags'] = ['apply_ranges' if self.apply_ranges else 'no_apply_ranges'] properties = mpl_to_bokeh(properties) @@ -2059,9 +2066,9 @@ def _update_glyph(self, renderer, properties, mapping, glyph, source, data): def _postprocess_hover(self, renderer, source): - """ - Attaches renderer to hover tool and processes tooltips to + """Attaches renderer to hover tool and processes tooltips to ensure datetime data is displayed correctly. + """ hover = self.handles.get('hover') if hover is None: @@ -2130,8 +2137,8 @@ def _init_glyphs(self, plot, element, ranges, source): self._update_glyph(renderer, properties, mapping, glyph, source, source.data) def _find_axes(self, plot, element): - """ - Looks up the axes and plot ranges given the plot and an element. + """Looks up the axes and plot ranges given the plot and an element. + """ axis_dims = self._get_axis_dims(element)[:2] x, y = axis_dims[::-1] if self.invert_axes else axis_dims @@ -2152,8 +2159,8 @@ def _find_axes(self, plot, element): return (x_axis, y_axis), (x_range, y_range) def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): - """ - Initializes a new plot object with the last available frame. + """Initializes a new plot object with the last available frame. + """ # Get element key and ranges for frame if self.batched: @@ -2212,8 +2219,7 @@ def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): return plot def _apply_hard_bounds(self, element, ranges): - """ - Apply hard bounds to the x and y ranges of the plot. If xlim/ylim is set, limit the + """Apply hard bounds to the x and y ranges of the plot. If xlim/ylim is set, limit the initial viewable range to xlim/ylim, but allow navigation up to the abs max between the data range and xlim/ylim. If dim range is set (e.g. via redim.range), enforce as hard bounds. @@ -2288,8 +2294,8 @@ def _update_glyphs(self, element, ranges, style): def _reset_ranges(self): - """ - Resets RangeXY streams if norm option is set to framewise + """Resets RangeXY streams if norm option is set to framewise + """ # Skipping conditional to temporarily revert fix (see https://github.com/holoviz/holoviews/issues/4396) # This fix caused PlotSize change events to rerender @@ -2308,9 +2314,9 @@ def _reset_ranges(self): @hold_render def update_frame(self, key, ranges=None, plot=None, element=None): - """ - Updates an existing plot with data corresponding + """Updates an existing plot with data corresponding to the key. + """ self._reset_ranges() reused = isinstance(self.hmap, DynamicMap) and (self.overlaid or self.batched) @@ -2377,11 +2383,11 @@ def _execute_hooks(self, element): def model_changed(self, model): - """ - Determines if the bokeh model was just changed on the frontend. + """Determines if the bokeh model was just changed on the frontend. Useful to suppress boomeranging events, e.g. when the frontend just sent an update to the x_range this should not trigger an update on the backend. + """ callbacks = [cb for cbs in self.traverse(lambda x: x.callbacks) for cb in cbs] @@ -2392,11 +2398,11 @@ def model_changed(self, model): @property def framewise(self): - """ - Property to determine whether the current frame should have + """Property to determine whether the current frame should have framewise normalization enabled. Required for bokeh plotting classes to determine whether to send updated ranges for each frame. 
+ """ current_frames = [el for f in self.traverse(lambda x: x.current_frame) for el in (f.traverse(lambda x: x, [Element]) @@ -2415,8 +2421,8 @@ def _draw_scalebar(self, *, plot, renderer): Requires Bokeh 3.4 For scalebar on a subcoordinate_y plot Bokeh 3.6 is needed. - """ + """ if not BOKEH_GE_3_4_0: raise RuntimeError("Scalebar requires Bokeh >= 3.4.0") elif not BOKEH_GE_3_6_0 and self._subcoord_overlaid: @@ -2502,9 +2508,9 @@ def _draw_scalebar(self, *, plot, renderer): class CompositeElementPlot(ElementPlot): - """ - A CompositeElementPlot is an Element plot type that coordinates + """A CompositeElementPlot is an Element plot type that coordinates drawing of multiple glyphs. + """ # Mapping between glyph names and style groups @@ -2609,8 +2615,8 @@ def _update_glyphs(self, element, ranges, style): def _init_glyph(self, plot, mapping, properties, key): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ properties = mpl_to_bokeh(properties) plot_method = '_'.join(key.split('_')[:-1]) @@ -2619,12 +2625,12 @@ def _init_glyph(self, plot, mapping, properties, key): class ColorbarPlot(ElementPlot): - """ - ColorbarPlot provides methods to create colormappers and colorbar + """ColorbarPlot provides methods to create colormappers and colorbar models which can be added to a glyph. Additionally it provides parameters to control the position and other styling options of the colorbar. The default colorbar_position options are defined by the colorbar_specs, but may be overridden by the colorbar_opts. + """ colorbar_specs = {'right': {'pos': 'right', @@ -2957,8 +2963,8 @@ def _get_cmapper_opts(self, low, high, factors, colors): def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object and optionally creates a colorbar. + """Returns a Bokeh glyph object and optionally creates a colorbar. + """ ret = super()._init_glyph(plot, mapping, properties) if self.colorbar: @@ -3039,8 +3045,8 @@ def _process_legend(self, plot=None): class AnnotationPlot: - """ - Mix-in plotting subclass for AnnotationPlots which do not have a legend. + """Mix-in plotting subclass for AnnotationPlots which do not have a legend. + """ @@ -3208,8 +3214,8 @@ def _process_legend(self, overlay): r.muted = self.legend_muted or r.muted def _init_tools(self, element, callbacks=None): - """ - Processes the list of tools to be supplied to the plot. + """Processes the list of tools to be supplied to the plot. + """ if callbacks is None: callbacks = [] @@ -3250,8 +3256,8 @@ def _init_tools(self, element, callbacks=None): return init_tools def _merge_tools(self, subplot): - """ - Merges tools on the overlay with those on the subplots. + """Merges tools on the overlay with those on the subplots. + """ if self.batched and 'hover' in subplot.handles: self.handles['hover'] = subplot.handles['hover'] @@ -3278,8 +3284,8 @@ def _merge_tools(self, subplot): overlay_zoom.renderers = renderers def _postprocess_subcoordinate_y_groups(self, overlay, plot): - """ - Add a zoom tool per group to the overlay. + """Add a zoom tool per group to the overlay. + """ # First, just process and validate the groups and their content. groups = defaultdict(list) @@ -3500,10 +3506,10 @@ def initialize_plot(self, ranges=None, plot=None, plots=None): @hold_render def update_frame(self, key, ranges=None, element=None): - """ - Update the internal state of the Plot to represent the given + """Update the internal state of the Plot to represent the given key tuple (where integers represent frames). Returns this state. 
+ """ self._reset_ranges() reused = isinstance(self.hmap, DynamicMap) and self.overlaid diff --git a/holoviews/plotting/bokeh/geometry.py b/holoviews/plotting/bokeh/geometry.py index 78735b1018..884dd2ff0a 100644 --- a/holoviews/plotting/bokeh/geometry.py +++ b/holoviews/plotting/bokeh/geometry.py @@ -9,9 +9,9 @@ class SegmentPlot(GeomMixin, ColorbarPlot): - """ - Segments are lines in 2D space where each two each dimensions specify a + """Segments are lines in 2D space where each two each dimensions specify a (x, y) node of the line. + """ selected = param.List(default=None, doc=""" diff --git a/holoviews/plotting/bokeh/graphs.py b/holoviews/plotting/bokeh/graphs.py index e469948f0e..b3f4c03e75 100644 --- a/holoviews/plotting/bokeh/graphs.py +++ b/holoviews/plotting/bokeh/graphs.py @@ -250,8 +250,8 @@ def get_data(self, element, ranges, style): def _update_datasource(self, source, data): - """ - Update datasource with data for a new frame. + """Update datasource with data for a new frame. + """ if isinstance(source, ColumnDataSource): if self.handles['static_source']: @@ -262,7 +262,9 @@ def _update_datasource(self, source, data): source.graph_layout = data def _init_filled_edges(self, renderer, properties, edge_mapping): - "Replace edge renderer with filled renderer" + """Replace edge renderer with filled renderer + + """ glyph_model = Patches if self.filled else Bezier allowed_properties = glyph_model.properties() for glyph_type in ('', 'selection_', 'nonselection_', 'hover_', 'muted_'): @@ -277,7 +279,9 @@ def _init_filled_edges(self, renderer, properties, edge_mapping): def _get_graph_properties(self, plot, element, data, mapping, ranges, style): - "Computes the args and kwargs for the GraphRenderer" + """Computes the args and kwargs for the GraphRenderer + + """ sources = [] properties, mappings = {}, {} @@ -315,7 +319,9 @@ def _get_graph_properties(self, plot, element, data, mapping, ranges, style): return (*sources, layout), properties def _reorder_renderers(self, plot, renderer, mapping): - "Reorders renderers based on the defined draw order" + """Reorders renderers based on the defined draw order + + """ renderers = dict({r: self.handles[r+'_glyph_renderer'] for r in mapping}, graph=renderer) other = [r for r in plot.renderers if r not in renderers.values()] @@ -479,8 +485,8 @@ def get_data(self, element, ranges, style): class NodePlot(PointPlot): - """ - Simple subclass of PointPlot which hides x, y position on hover. + """Simple subclass of PointPlot which hides x, y position on hover. + """ def _hover_opts(self, element): diff --git a/holoviews/plotting/bokeh/heatmap.py b/holoviews/plotting/bokeh/heatmap.py index 88f8f0143e..ee1edd8bbc 100644 --- a/holoviews/plotting/bokeh/heatmap.py +++ b/holoviews/plotting/bokeh/heatmap.py @@ -244,11 +244,10 @@ def __init__(self, *args, **kwargs): self.yaxis = None def _get_bins(self, kind, order, reverse=False): - """ - Map elements from given `order` array to bins of start and end values + """Map elements from given `order` array to bins of start and end values for radius or angle dimension. - """ + """ if kind == "radius": start = self.max_radius * self.radius_inner end = self.max_radius @@ -267,24 +266,23 @@ def _get_bins(self, kind, order, reverse=False): @staticmethod def _get_bounds(mapper, values): - """ - Extract first and second value from tuples of mapped bins. - """ + """Extract first and second value from tuples of mapped bins. 
+ """ array = np.array([mapper.get(x) for x in values]) return array[:, 0], array[:, 1] def _postprocess_hover(self, renderer, source): - """ - Limit hover tool to annular wedges only. - """ + """Limit hover tool to annular wedges only. + """ if isinstance(renderer.glyph, AnnularWedge): super()._postprocess_hover(renderer, source) def get_extents(self, view, ranges, range_type='combined', **kwargs): """Supply custom, static extents because radial heatmaps always have the same boundaries. + """ if range_type not in ('data', 'combined'): return (None,)*4 @@ -303,7 +301,6 @@ def _axis_properties(self, *args, **kwargs): missing radial axes in bokeh. """ - return {} def get_default_mapping(self, z, cmapper): @@ -311,7 +308,6 @@ def get_default_mapping(self, z, cmapper): mappings. """ - map_annular = dict(x=self.max_radius, y=self.max_radius, inner_radius="inner_radius", outer_radius="outer_radius", @@ -338,8 +334,8 @@ def get_default_mapping(self, z, cmapper): 'arc_1': map_ymarks} def _pprint(self, element, dim_label, vals): - """ - Helper function to convert values to corresponding dimension type. + """Helper function to convert values to corresponding dimension type. + """ if vals.dtype.kind not in 'SU': dim = element.gridded.get_dimension(dim_label) @@ -348,9 +344,9 @@ def _pprint(self, element, dim_label, vals): return vals def _compute_tick_mapping(self, kind, order, bins): - """ - Helper function to compute tick mappings based on `ticks` and + """Helper function to compute tick mappings based on `ticks` and default orders and bins. + """ if kind == "angle": ticks = self.xticks @@ -373,8 +369,8 @@ def _compute_tick_mapping(self, kind, order, bins): return {x: bins[x] for x in text_nth} def _get_seg_labels_data(self, order_seg, bins_seg): - """ - Generate ColumnDataSource dictionary for segment labels. + """Generate ColumnDataSource dictionary for segment labels. + """ if self.xticks is None: return dict(x=[], y=[], text=[], angle=[]) @@ -396,8 +392,8 @@ def _get_seg_labels_data(self, order_seg, bins_seg): angle=1.5 * np.pi + radiant) def _get_ann_labels_data(self, order_ann, bins_ann): - """ - Generate ColumnDataSource dictionary for annular labels. + """Generate ColumnDataSource dictionary for annular labels. + """ if self.yticks is None: return dict(x=[], y=[], text=[], angle=[]) @@ -418,8 +414,8 @@ def _get_ann_labels_data(self, order_ann, bins_ann): @staticmethod def _get_markers(marks, order, bins): - """ - Helper function to get marker positions depending on mark type. + """Helper function to get marker positions depending on mark type. + """ if callable(marks): markers = [x for x in order if marks(x)] @@ -434,8 +430,8 @@ def _get_markers(marks, order, bins): return np.array([bins[x][1] for x in markers]) def _get_xmarks_data(self, order_seg, bins_seg): - """ - Generate ColumnDataSource dictionary for segment separation lines. + """Generate ColumnDataSource dictionary for segment separation lines. + """ if not self.xmarks: return dict(xs=[], ys=[]) @@ -457,8 +453,8 @@ def _get_xmarks_data(self, order_seg, bins_seg): return dict(xs=list(xs), ys=list(ys)) def _get_ymarks_data(self, order_ann, bins_ann): - """ - Generate ColumnDataSource dictionary for segment separation lines. + """Generate ColumnDataSource dictionary for segment separation lines. 
+ """ if not self.ymarks: return dict(radius=[]) diff --git a/holoviews/plotting/bokeh/hex_tiles.py b/holoviews/plotting/bokeh/hex_tiles.py index 6a34441656..5ed5c010e9 100644 --- a/holoviews/plotting/bokeh/hex_tiles.py +++ b/holoviews/plotting/bokeh/hex_tiles.py @@ -15,11 +15,11 @@ class hex_binning(Operation): - """ - Applies hex binning by computing aggregates on a hexagonal grid. + """Applies hex binning by computing aggregates on a hexagonal grid. Should not be user facing as the returned element is not directly usable. + """ aggregator = param.ClassSelector( diff --git a/holoviews/plotting/bokeh/links.py b/holoviews/plotting/bokeh/links.py index ec3172cb25..85ee7f606a 100644 --- a/holoviews/plotting/bokeh/links.py +++ b/holoviews/plotting/bokeh/links.py @@ -80,9 +80,9 @@ def __init__(self, root_model, link, source_plot, target_plot=None): @classmethod def find_links(cls, root_plot): - """ - Traverses the supplied plot and searches for any Links on + """Traverses the supplied plot and searches for any Links on the plotted objects. + """ plot_fn = lambda x: isinstance(x, (GenericElementPlot, GenericOverlayPlot)) plots = root_plot.traverse(lambda x: x, [plot_fn]) @@ -103,16 +103,20 @@ def find_links(cls, root_plot): @classmethod def find_link(cls, plot, link=None, target=False): - """ - Searches a plot for any Links declared on the sources of the plot. - - Args: - plot: The plot to search for Links - link: A Link instance to check for matches - target: Whether to check against the Link.target - - Returns: - A tuple containing the matched plot and list of matching Links. + """Searches a plot for any Links declared on the sources of the plot. + + Parameters + ---------- + plot + The plot to search for Links + link + A Link instance to check for matches + target + Whether to check against the Link.target + + Returns + ------- + A tuple containing the matched plot and list of matching Links. """ attr = 'target' if target else 'source' if link is None: @@ -136,16 +140,16 @@ def find_link(cls, plot, link=None, target=False): return (plot, links) def validate(self): - """ - Should be subclassed to check if the source and target plots + """Should be subclassed to check if the source and target plots are compatible to perform the linking. + """ class RangeToolLinkCallback(LinkCallback): - """ - Attaches a RangeTool to the source plot and links it to the + """Attaches a RangeTool to the source plot and links it to the specified axes on the target plot + """ def __init__(self, root_model, link, source_plot, target_plot): @@ -229,8 +233,8 @@ def _set_range_for_interval(self, axis, max): class DataLinkCallback(LinkCallback): - """ - Merges the source and target ColumnDataSource + """Merges the source and target ColumnDataSource + """ def __init__(self, root_model, link, source_plot, target_plot): diff --git a/holoviews/plotting/bokeh/path.py b/holoviews/plotting/bokeh/path.py index cebdace513..c4743eb021 100644 --- a/holoviews/plotting/bokeh/path.py +++ b/holoviews/plotting/bokeh/path.py @@ -76,8 +76,8 @@ def _hover_opts(self, element): def _get_hover_data(self, data, element): - """ - Initializes hover data based on Element dimension values. + """Initializes hover data based on Element dimension values. + """ if 'hover' not in self.handles or self.static_source: return @@ -212,9 +212,9 @@ def _hover_opts(self, element): return dims, {} def _get_hover_data(self, data, element): - """ - Initializes hover data based on Element dimension values. 
+ """Initializes hover data based on Element dimension values. If empty initializes with no data. + """ if 'hover' not in self.handles or self.static_source: return @@ -290,8 +290,8 @@ def get_data(self, element, ranges, style): return data, mapping, style def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ plot_method = properties.pop('plot_method', None) properties = mpl_to_bokeh(properties) diff --git a/holoviews/plotting/bokeh/plot.py b/holoviews/plotting/bokeh/plot.py index 9ac62b0fcb..51132815a1 100644 --- a/holoviews/plotting/bokeh/plot.py +++ b/holoviews/plotting/bokeh/plot.py @@ -66,9 +66,9 @@ class BokehPlot(DimensionedPlot, CallbackPlot): - """ - Plotting baseclass for the Bokeh backends, implementing the basic + """Plotting baseclass for the Bokeh backends, implementing the basic plotting interface for Bokeh based plots. + """ shared_datasource = param.Boolean(default=True, doc=""" @@ -118,11 +118,11 @@ def id(self): def get_data(self, element, ranges, style): - """ - Returns the data from an element in the appropriate format for + """Returns the data from an element in the appropriate format for initializing or updating a ColumnDataSource and a dictionary which maps the expected keywords arguments of a glyph to the column in the datasource. + """ raise NotImplementedError @@ -136,8 +136,8 @@ def _update_selected(self, cds): s.update(index=self.selected) def _init_datasource(self, data): - """ - Initializes a data source to be passed into the bokeh glyph. + """Initializes a data source to be passed into the bokeh glyph. + """ data = self._postprocess_data(data) cds = ColumnDataSource(data=data) @@ -147,9 +147,9 @@ def _init_datasource(self, data): def _postprocess_data(self, data): - """ - Applies necessary type transformation to the data before + """Applies necessary type transformation to the data before it is set on a ColumnDataSource. + """ new_data = {} for k, values in data.items(): @@ -169,8 +169,8 @@ def _postprocess_data(self, data): def _update_datasource(self, source, data): - """ - Update datasource with data for a new frame. + """Update datasource with data for a new frame. + """ data = self._postprocess_data(data) empty = all(len(v) == 0 for v in data.values()) @@ -198,18 +198,18 @@ def _update_datasource(self, source, data): @property def state(self): - """ - The plotting state that gets updated via the update method and + """The plotting state that gets updated via the update method and used by the renderer to generate output. + """ return self.handles['plot'] @property def current_handles(self): - """ - Should return a list of plot objects that have changed and + """Should return a list of plot objects that have changed and should be updated. + """ return [] @@ -233,10 +233,10 @@ def _get_fontsize_defaults(self): def cleanup(self): - """ - Cleans up references to the plot after the plot has been + """Cleans up references to the plot after the plot has been deleted. Traverses through all plots cleaning up Callbacks and Stream subscribers. + """ plots = self.traverse(lambda x: x, [BokehPlot]) for plot in plots: @@ -264,9 +264,9 @@ def cleanup(self): ] def _fontsize(self, key, label='fontsize', common=True): - """ - Converts integer fontsizes to a string specifying + """Converts integer fontsizes to a string specifying fontsize in pt. 
+ """ size = super()._fontsize(key, label, common) return {k: v if isinstance(v, str) else f'{v}pt' @@ -306,9 +306,9 @@ def _get_title_div(self, key, default_fontsize='15pt', width=450): return title_div def sync_sources(self): - """ - Syncs data sources between Elements, which draw data + """Syncs data sources between Elements, which draw data from the same object. + """ get_sources = lambda x: (id(x.current_frame.data), x) filter_fn = lambda x: (x.shared_datasource and x.current_frame is not None and @@ -361,10 +361,10 @@ def init_links(self): class CompositePlot(BokehPlot): - """ - CompositePlot is an abstract baseclass for plot types that draw + """CompositePlot is an abstract baseclass for plot types that draw render multiple axes. It implements methods to add an overall title to such a plot. + """ sizing_mode = param.Selector(default=None, objects=[ @@ -415,9 +415,9 @@ class CompositePlot(BokehPlot): {'title': '15pt'}""") def _link_dimensioned_streams(self): - """ - Should perform any linking required to update titles when dimensioned + """Should perform any linking required to update titles when dimensioned streams change. + """ streams = [s for s in self.streams if any(k in self.dimensions for k in s.contents)] for s in streams: @@ -431,18 +431,18 @@ def _stream_update(self, **kwargs): @property def current_handles(self): - """ - Should return a list of plot objects that have changed and + """Should return a list of plot objects that have changed and should be updated. + """ return [self.handles['title']] if 'title' in self.handles else [] class GridPlot(CompositePlot, GenericCompositePlot): - """ - Plot a group of elements in a grid layout based on a GridSpace element + """Plot a group of elements in a grid layout based on a GridSpace element object. + """ axis_offset = param.Integer(default=50, doc=""" @@ -685,10 +685,10 @@ def _make_axes(self, plot): @update_shared_sources def update_frame(self, key, ranges=None): - """ - Update the internal state of the Plot to represent the given + """Update the internal state of the Plot to represent the given key tuple (where integers represent frames). Returns this state. + """ ranges = self.compute_ranges(self.layout, key, ranges) for coord in self.layout.keys(full_grid=True): @@ -805,12 +805,12 @@ def _init_layout(self, layout): def _create_subplots(self, layout, positions, layout_dimensions, ranges, num=0): - """ - Plot all the views contained in the AdjointLayout Object using axes + """Plot all the views contained in the AdjointLayout Object using axes appropriate to the layout configuration. All the axes are supplied by LayoutPlot - the purpose of the call is to invoke subplots with correct options and styles and hide any empty axes as necessary. + """ subplots = {} adjoint_clone = layout.clone(shared_data=False, id=layout.id) @@ -879,9 +879,9 @@ def _create_subplots(self, layout, positions, layout_dimensions, ranges, num=0): def _compute_grid(self): - """ - Computes an empty grid to position the plots on by expanding + """Computes an empty grid to position the plots on by expanding any AdjointLayouts into multiple rows and columns. + """ widths = [] for c in range(self.cols): @@ -1042,10 +1042,10 @@ def initialize_plot(self, plots=None, ranges=None): @update_shared_sources def update_frame(self, key, ranges=None): - """ - Update the internal state of the Plot to represent the given + """Update the internal state of the Plot to represent the given key tuple (where integers represent frames). Returns this state. 
+ """ ranges = self.compute_ranges(self.layout, key, ranges) for r, c in self.coords: @@ -1073,12 +1073,12 @@ def __init__(self, layout, layout_type, subplots, **params): super().__init__(subplots=subplots, **params) def initialize_plot(self, ranges=None, plots=None): - """ - Plot all the views contained in the AdjointLayout Object using axes + """Plot all the views contained in the AdjointLayout Object using axes appropriate to the layout configuration. All the axes are supplied by LayoutPlot - the purpose of the call is to invoke subplots with correct options and styles and hide any empty axes as necessary. + """ if plots is None: plots = [] diff --git a/holoviews/plotting/bokeh/raster.py b/holoviews/plotting/bokeh/raster.py index baaf8816eb..be0393856e 100644 --- a/holoviews/plotting/bokeh/raster.py +++ b/holoviews/plotting/bokeh/raster.py @@ -522,14 +522,14 @@ def get_data(self, element, ranges, style): return data, mapping, style def _collect_hover_data(self, element, mask=(), irregular=False): - """ - Returns a dict mapping hover dimension names to flattened arrays. + """Returns a dict mapping hover dimension names to flattened arrays. Note that `Quad` glyphs are used when given 1-D coords but `Patches` are used for "irregular" 2-D coords, and Bokeh inserts data into these glyphs in the opposite order such that the relationship b/w the `invert_axes` parameter and the need to transpose the arrays before flattening is reversed. + """ transpose = self.invert_axes if irregular else not self.invert_axes @@ -543,8 +543,8 @@ def _collect_hover_data(self, element, mask=(), irregular=False): return hover_data def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ properties = mpl_to_bokeh(properties) properties = dict(properties, **mapping) diff --git a/holoviews/plotting/bokeh/renderer.py b/holoviews/plotting/bokeh/renderer.py index 9f6af2d9bf..6e5aa48e99 100644 --- a/holoviews/plotting/bokeh/renderer.py +++ b/holoviews/plotting/bokeh/renderer.py @@ -55,15 +55,17 @@ class BokehRenderer(Renderer): @bothmethod def _save_prefix(self_or_cls, ext): - "Hook to prefix content for instance JS when saving HTML" + """Hook to prefix content for instance JS when saving HTML + + """ return @bothmethod def get_plot(self_or_cls, obj, doc=None, renderer=None, **kwargs): - """ - Given a HoloViews Viewable return a corresponding plot instance. + """Given a HoloViews Viewable return a corresponding plot instance. Allows supplying a document attach the plot to, useful when combining the bokeh model with another plot. + """ plot = super().get_plot(obj, doc, renderer, **kwargs) if plot.document is None: @@ -73,10 +75,10 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, **kwargs): return plot def _figure_data(self, plot, fmt, doc=None, as_script=False, **kwargs): - """ - Given a plot instance, an output format and an optional bokeh + """Given a plot instance, an output format and an optional bokeh document, return the corresponding data. If as_script is True, the content will be split in an HTML and a JS component. + """ model = plot.state if doc is None: @@ -143,8 +145,7 @@ def _figure_data(self, plot, fmt, doc=None, as_script=False, **kwargs): @classmethod def plot_options(cls, obj, percent_size): - """ - Given a holoviews object and a percentage size, apply heuristics + """Given a holoviews object and a percentage size, apply heuristics to compute a suitable figure size. 
For instance, scaling layouts and grids linearly can result in unwieldy figure sizes when there are a large number of elements. As ad hoc heuristics are used, @@ -154,6 +155,7 @@ def plot_options(cls, obj, percent_size): Used by the IPython Notebook display hooks and the save utility. Note that this can be overridden explicitly per object using the fig_size and size plot options. + """ obj = obj.last if isinstance(obj, HoloMap) else obj plot = Store.registry[cls.backend].get(type(obj), None) @@ -171,12 +173,12 @@ def plot_options(cls, obj, percent_size): @bothmethod def get_size(self_or_cls, plot): - """ - Return the display size associated with a plot before + """Return the display size associated with a plot before rendering to any particular format. Used to generate appropriate HTML display. Returns a tuple of (width, height) in pixels. + """ if isinstance(plot, Plot): plot = plot.state diff --git a/holoviews/plotting/bokeh/sankey.py b/holoviews/plotting/bokeh/sankey.py index fd3b03a7a4..613823ccf8 100644 --- a/holoviews/plotting/bokeh/sankey.py +++ b/holoviews/plotting/bokeh/sankey.py @@ -105,8 +105,8 @@ def _sync_nodes(self): arc_glyph.update(**styles) def _compute_quads(self, element, data, mapping): - """ - Computes the node quad glyph data.x + """Computes the node quad glyph data.x + """ quad_mapping = {'left': 'x0', 'right': 'x1', 'bottom': 'y0', 'top': 'y1'} quad_data = dict(data['scatter_1']) @@ -121,8 +121,8 @@ def _compute_quads(self, element, data, mapping): mapping['quad_1'] = quad_mapping def _compute_labels(self, element, data, mapping): - """ - Computes labels for the nodes and adds it to the data. + """Computes labels for the nodes and adds it to the data. + """ if element.vdims: edges = Dataset(element)[element[element.vdims[0].name]>0] @@ -211,8 +211,8 @@ def _compute_labels(self, element, data, mapping): text_baseline='middle', text_align=align) def _patch_hover(self, element, data): - """ - Replace edge start and end hover data with label_index data. + """Replace edge start and end hover data with label_index data. + """ if not (self.inspection_policy == 'edges' and 'hover' in self.handles): return @@ -227,7 +227,9 @@ def _patch_hover(self, element, data): data['patches_1'][tgt] = [lookup.get(v, v) for v in tgt_vals] def get_extents(self, element, ranges, range_type='combined', **kwargs): - """Return the extents of the Sankey box""" + """Return the extents of the Sankey box + + """ if range_type == 'extents': return element.nodes.extents xdim, ydim = element.nodes.kdims[:2] diff --git a/holoviews/plotting/bokeh/selection.py b/holoviews/plotting/bokeh/selection.py index b8a0d586e8..c9d26fa5d4 100644 --- a/holoviews/plotting/bokeh/selection.py +++ b/holoviews/plotting/bokeh/selection.py @@ -25,8 +25,8 @@ def build_selection(self, selection_streams, hvobj, operations, region_stream=No class BokehOverlaySelectionDisplay(OverlaySelectionDisplay): - """ - Overlay selection display subclass for use with bokeh backend + """Overlay selection display subclass for use with bokeh backend + """ def _build_element_layer(self, element, layer_color, layer_alpha, **opts): diff --git a/holoviews/plotting/bokeh/stats.py b/holoviews/plotting/bokeh/stats.py index 57b2a526ca..45cbcb7b11 100644 --- a/holoviews/plotting/bokeh/stats.py +++ b/holoviews/plotting/bokeh/stats.py @@ -28,8 +28,8 @@ class DistributionPlot(AreaPlot): - """ - DistributionPlot visualizes a distribution of values as a KDE. + """DistributionPlot visualizes a distribution of values as a KDE. 
+ """ bandwidth = param.Number(default=None, doc=""" @@ -45,11 +45,11 @@ class DistributionPlot(AreaPlot): class BivariatePlot(PolygonPlot): - """ - Bivariate plot visualizes two-dimensional kernel density + """Bivariate plot visualizes two-dimensional kernel density estimates. Additionally, by enabling the joint option, the marginals distributions can be plotted alongside each axis (does not animate or compose). + """ bandwidth = param.Number(default=None, doc=""" @@ -117,8 +117,8 @@ def _apply_transforms(self, element, data, ranges, style, group=None): return super()._apply_transforms(element, data, ranges, style, group) def _get_factors(self, element, ranges): - """ - Get factors for categorical axes. + """Get factors for categorical axes. + """ if not element.kdims: xfactors, yfactors = [element.label], [] @@ -372,8 +372,8 @@ def _get_axis_dims(self, element): return kdims, element.vdims[0] def _get_factors(self, element, ranges): - """ - Get factors for categorical axes. + """Get factors for categorical axes. + """ split_dim = dim(self.split) if isinstance(self.split, str) else self.split kdims = [kd for kd in element.kdims if not split_dim or kd != split_dim.dimension] diff --git a/holoviews/plotting/bokeh/styles.py b/holoviews/plotting/bokeh/styles.py index 6294ca9fc1..585e95df89 100644 --- a/holoviews/plotting/bokeh/styles.py +++ b/holoviews/plotting/bokeh/styles.py @@ -1,5 +1,5 @@ -""" -Defines valid style options, validation and utilities +"""Defines valid style options, validation and utilities + """ import numpy as np @@ -73,10 +73,10 @@ def mpl_to_bokeh(properties): - """ - Utility to process style properties converting any + """Utility to process style properties converting any matplotlib specific options to their nearest bokeh equivalent. + """ new_properties = {} for k, v in properties.items(): @@ -127,20 +127,19 @@ def get_validator(style): def validate(style, value, scalar=False): - """ - Validates a style and associated value. + """Validates a style and associated value. - Arguments - --------- - style: str + Parameters + ---------- + style : str The style to validate (e.g. 'color', 'size' or 'marker') - value: + value : The style value to validate - scalar: bool + scalar : bool Returns ------- - valid: boolean or None + valid : boolean or None If validation is supported returns boolean, otherwise None """ validator = get_validator(style) @@ -155,8 +154,8 @@ def validate(style, value, scalar=False): # Utilities def rgba_tuple(rgba): - """ - Ensures RGB(A) tuples in the range 0-1 are scaled to 0-255. + """Ensures RGB(A) tuples in the range 0-1 are scaled to 0-255. + """ if isinstance(rgba, tuple): return tuple(int(c*255) if i<3 else c for i, c in enumerate(rgba)) @@ -165,11 +164,11 @@ def rgba_tuple(rgba): def expand_batched_style(style, opts, mapping, nvals): - """ - Computes styles applied to a batched plot by iterating over the + """Computes styles applied to a batched plot by iterating over the supplied list of style options and expanding any options found in the supplied style dictionary returning a data and mapping defining the data that should be added to the ColumnDataSource. 
+ """ opts = sorted(opts, key=lambda x: x in ['color', 'alpha']) applied_styles = set(mapping) diff --git a/holoviews/plotting/bokeh/tabular.py b/holoviews/plotting/bokeh/tabular.py index 58237f0cf9..e0776fc368 100644 --- a/holoviews/plotting/bokeh/tabular.py +++ b/holoviews/plotting/bokeh/tabular.py @@ -58,8 +58,8 @@ def get_data(self, element, ranges, style): for d in element.dimensions()}, {}, style) def initialize_plot(self, ranges=None, plot=None, plots=None, source=None): - """ - Initializes a new plot object with the last available frame. + """Initializes a new plot object with the last available frame. + """ # Get element key and ranges for frame element = self.hmap.last @@ -123,9 +123,9 @@ def _get_columns(self, element, data): def update_frame(self, key, ranges=None, plot=None): - """ - Updates an existing plot with data corresponding + """Updates an existing plot with data corresponding to the key. + """ element = self._get_frame(key) self.param.update(**self.lookup_options(element, 'plot').options) diff --git a/holoviews/plotting/bokeh/tiles.py b/holoviews/plotting/bokeh/tiles.py index 51b8b274d1..8a13b4e556 100644 --- a/holoviews/plotting/bokeh/tiles.py +++ b/holoviews/plotting/bokeh/tiles.py @@ -64,8 +64,8 @@ def _update_glyph(self, renderer, properties, mapping, glyph, source=None, data= if k in renderer.properties()}) def _init_glyph(self, plot, mapping, properties): - """ - Returns a Bokeh glyph object. + """Returns a Bokeh glyph object. + """ tile_source = mapping['tile_source'] level = properties.pop('level', 'glyph') diff --git a/holoviews/plotting/bokeh/util.py b/holoviews/plotting/bokeh/util.py index 685a6e3bcd..369c665b74 100644 --- a/holoviews/plotting/bokeh/util.py +++ b/holoviews/plotting/bokeh/util.py @@ -110,16 +110,16 @@ def convert_timestamp(timestamp): - """ - Converts bokehJS timestamp to datetime64. + """Converts bokehJS timestamp to datetime64. + """ datetime = dt.datetime.fromtimestamp(timestamp/1000, tz=dt.timezone.utc) return np.datetime64(datetime.replace(tzinfo=None)) def prop_is_none(value): - """ - Checks if property value is None. + """Checks if property value is None. + """ return (value is None or (isinstance(value, dict) and 'value' in value @@ -127,9 +127,9 @@ def prop_is_none(value): def decode_bytes(array): - """ - Decodes an array, list or tuple of bytestrings to avoid python 3 + """Decodes an array, list or tuple of bytestrings to avoid python 3 bokeh serialization errors + """ if (not len(array) or (isinstance(array, arraylike_types) and array.dtype.kind != 'O')): return array @@ -142,8 +142,8 @@ def decode_bytes(array): def layout_padding(plots, renderer): - """ - Pads Nones in a list of lists of plots with empty plots. + """Pads Nones in a list of lists of plots with empty plots. + """ widths, heights = defaultdict(int), defaultdict(int) for r, row in enumerate(plots): @@ -167,9 +167,9 @@ def layout_padding(plots, renderer): def compute_plot_size(plot): - """ - Computes the size of bokeh models that make up a layout such as + """Computes the size of bokeh models that make up a layout such as figures, rows, columns, and Plot. 
+ """ if isinstance(plot, (GridBox, GridPlot)): ndmapping = NdMapping({(x, y): fig for fig, y, x in plot.children}, kdims=['x', 'y']) @@ -211,26 +211,38 @@ def compute_layout_properties( width, height, frame_width, frame_height, explicit_width, explicit_height, aspect, data_aspect, responsive, size_multiplier, logger=None): - """ - Utility to compute the aspect, plot width/height and sizing_mode + """Utility to compute the aspect, plot width/height and sizing_mode behavior. - Args: - width (int): Plot width - height (int): Plot height - frame_width (int): Plot frame width - frame_height (int): Plot frame height - explicit_width (list): List of user supplied widths - explicit_height (list): List of user supplied heights - aspect (float): Plot aspect - data_aspect (float): Scaling between x-axis and y-axis ranges - responsive (boolean): Whether the plot should resize responsively - size_multiplier (float): Multiplier for supplied plot dimensions - logger (param.Parameters): Parameters object to issue warnings on - - Returns: - Returns two dictionaries one for the aspect and sizing modes, - and another for the plot dimensions. + Parameters + ---------- + width : int + Plot width + height : int + Plot height + frame_width : int + Plot frame width + frame_height : int + Plot frame height + explicit_width : list + List of user supplied widths + explicit_height : list + List of user supplied heights + aspect : float + Plot aspect + data_aspect : float + Scaling between x-axis and y-axis ranges + responsive : boolean + Whether the plot should resize responsively + size_multiplier : float + Multiplier for supplied plot dimensions + logger : param.Parameters + Parameters object to issue warnings on + + Returns + ------- + Returns two dictionaries one for the aspect and sizing modes, + and another for the plot dimensions. """ fixed_width = (explicit_width or frame_width) fixed_height = (explicit_height or frame_height) @@ -389,11 +401,11 @@ def compute_layout_properties( def merge_tools(plot_grid, *, disambiguation_properties=None, hide_toolbar=False): - """ - Merges tools defined on a grid of plots into a single toolbar. + """Merges tools defined on a grid of plots into a single toolbar. All tools of the same type are merged unless they define one of the disambiguation properties. By default `name`, `icon`, `tags` and `description` can be used to prevent tools from being merged. + """ tools = [] for row in plot_grid: @@ -469,7 +481,7 @@ def sync_legends(bokeh_layout): def select_legends(holoviews_layout, figure_index=None, legend_position="top_right"): - """ Only displays selected legends in plot layout. + """Only displays selected legends in plot layout. Parameters ---------- @@ -507,8 +519,8 @@ def select_legends(holoviews_layout, figure_index=None, legend_position="top_rig @contextmanager def silence_warnings(*warnings): - """ - Context manager for silencing bokeh validation warnings. + """Context manager for silencing bokeh validation warnings. + """ for warning in warnings: silence(warning) @@ -520,15 +532,15 @@ def silence_warnings(*warnings): def empty_plot(width, height): - """ - Creates an empty and invisible plot of the specified size. + """Creates an empty and invisible plot of the specified size. + """ return Spacer(width=width, height=height) def remove_legend(plot, legend): - """ - Removes a legend from a bokeh plot. + """Removes a legend from a bokeh plot. 
+ """ valid_places = ['left', 'right', 'above', 'below', 'center'] plot.legend[:] = [l for l in plot.legend if l is not legend] @@ -539,8 +551,8 @@ def remove_legend(plot, legend): def font_size_to_pixels(size): - """ - Convert a fontsize to a pixel value + """Convert a fontsize to a pixel value + """ if size is None or not isinstance(size, str): return @@ -621,9 +633,9 @@ def make_axis(axis, size, factors, dim, flip=False, rotation=0, def hsv_to_rgb(hsv): - """ - Vectorized HSV to RGB conversion, adapted from: - http://stackoverflow.com/questions/24852345/hsv-to-rgb-color-conversion + """Vectorized HSV to RGB conversion, adapted from: + https://stackoverflow.com/questions/24852345/hsv-to-rgb-color-conversion + """ h, s, v = (hsv[..., i] for i in range(3)) shape = h.shape @@ -651,9 +663,9 @@ def hsv_to_rgb(hsv): def pad_width(model, table_padding=0.85, tabs_padding=1.2): - """ - Computes the width of a model and sets up appropriate padding + """Computes the width of a model and sets up appropriate padding for Tabs and DataTable types. + """ if isinstance(model, Row): vals = [pad_width(child) for child in model.children] @@ -680,10 +692,10 @@ def pad_width(model, table_padding=0.85, tabs_padding=1.2): def pad_plots(plots): - """ - Accepts a grid of bokeh plots in form of a list of lists and + """Accepts a grid of bokeh plots in form of a list of lists and wraps any DataTable or Tabs in a Column with appropriate padding. Required to avoid overlap in gridplot. + """ widths = [] for row in plots: @@ -699,9 +711,9 @@ def pad_plots(plots): def filter_toolboxes(plots): - """ - Filters out toolboxes out of a list of plots to be able to compose + """Filters out toolboxes out of a list of plots to be able to compose them into a larger plot. + """ if isinstance(plots, list): plots = [filter_toolboxes(plot) for plot in plots] @@ -714,9 +726,9 @@ def filter_toolboxes(plots): def get_tab_title(key, frame, overlay): - """ - Computes a title for bokeh tabs from the key in the overlay, the + """Computes a title for bokeh tabs from the key in the overlay, the element and the containing (Nd)Overlay. + """ if isinstance(overlay, Overlay): if frame is not None: @@ -737,8 +749,8 @@ def get_tab_title(key, frame, overlay): def get_default(model, name, theme=None): - """ - Looks up the default value for a bokeh model property. + """Looks up the default value for a bokeh model property. + """ overrides = None if theme is not None: @@ -750,10 +762,10 @@ def get_default(model, name, theme=None): def filter_batched_data(data, mapping): - """ - Iterates over the data and mapping for a ColumnDataSource and + """Iterates over the data and mapping for a ColumnDataSource and replaces columns with repeating values with a scalar. This is purely and optimization for scalar types. + """ for k, v in list(mapping.items()): if isinstance(v, dict) and 'field' in v: @@ -771,10 +783,10 @@ def filter_batched_data(data, mapping): pass def cds_column_replace(source, data): - """ - Determine if the CDS.data requires a full replacement or simply + """Determine if the CDS.data requires a full replacement or simply needs to be updated. A replacement is required if untouched columns are not the same length as the columns being updated. + """ current_length = [len(v) for v in source.data.values() if isinstance(v, (list, *arraylike_types))] @@ -785,8 +797,8 @@ def cds_column_replace(source, data): @contextmanager def hold_policy(document, policy, server=False): - """ - Context manager to temporary override the hold policy. 
+ """Context manager to temporary override the hold policy. + """ old_policy = document.callbacks.hold_value document.callbacks._hold = policy @@ -800,10 +812,10 @@ def hold_policy(document, policy, server=False): def recursive_model_update(model, props): - """ - Recursively updates attributes on a model including other + """Recursively updates attributes on a model including other models. If the type of the new model matches the old model properties are simply updated, otherwise the model is replaced. + """ updates = {} valid_properties = model.properties_with_values() @@ -829,12 +841,12 @@ def recursive_model_update(model, props): def update_shared_sources(f): - """ - Context manager to ensures data sources shared between multiple + """Context manager to ensures data sources shared between multiple plots are cleared and updated appropriately avoiding warnings and allowing empty frames on subplots. Expects a list of shared_sources and a mapping of the columns expected columns for each source in the plots handles. + """ def wrapper(self, *args, **kwargs): source_cols = self.handles.get('source_cols', {}) @@ -859,9 +871,9 @@ def wrapper(self, *args, **kwargs): def hold_render(f): - """ - Decorator that will hold render on a Bokeh ElementPlot until after + """Decorator that will hold render on a Bokeh ElementPlot until after the method has been called. + """ def wrapper(self, *args, **kwargs): hold = self.state.hold_render @@ -882,18 +894,18 @@ def wrapper(self, *args, **kwargs): def categorize_array(array, dim): - """ - Uses a Dimension instance to convert an array of values to categorical + """Uses a Dimension instance to convert an array of values to categorical (i.e. string) values and applies escaping for colons, which bokeh treats as a categorical suffix. + """ return np.array([dim.pprint_value(x) for x in array]) class periodic: - """ - Mocks the API of periodic Thread in hv.core.util, allowing a smooth + """Mocks the API of periodic Thread in hv.core.util, allowing a smooth API transition on bokeh server. + """ def __init__(self, document): @@ -957,8 +969,8 @@ def __str__(self): def attach_periodic(plot): - """ - Attaches plot refresh to all streams on the object. + """Attaches plot refresh to all streams on the object. + """ def append_refresh(dmap): for subdmap in get_nested_dmaps(dmap): @@ -974,11 +986,13 @@ def date_to_integer(date): If datetime is a cftime with a non-standard calendar the caveats described in hv.core.util.cftime_to_timestamp apply. - Args: - date: Date- or datetime-like object + Parameters + ---------- + date : Date- or datetime-like object - Returns: - Milliseconds since 1970-01-01 00:00:00 + Returns + ------- + Milliseconds since 1970-01-01 00:00:00 """ if isinstance(date, pd.Timestamp): try: @@ -999,12 +1013,12 @@ def date_to_integer(date): def glyph_order(keys, draw_order=None): - """ - Orders a set of glyph handles using regular sort and an explicit + """Orders a set of glyph handles using regular sort and an explicit sort order. The explicit draw order must take the form of a list of glyph names while the keys should be glyph names with a custom suffix. The draw order may only match subset of the keys and any matched items will take precedence over other entries. + """ if draw_order is None: draw_order = [] @@ -1017,9 +1031,9 @@ def order_fn(glyph): def colormesh(X, Y): - """ - Generates line paths for a quadmesh given 2D arrays of X and Y + """Generates line paths for a quadmesh given 2D arrays of X and Y coordinates. 
+ """ X1 = X[0:-1, 0:-1].ravel() Y1 = Y[0:-1, 0:-1].ravel() @@ -1045,10 +1059,10 @@ def theme_attr_json(theme, attr): def multi_polygons_data(element): - """ - Expands polygon data which contains holes to a bokeh multi_polygons + """Expands polygon data which contains holes to a bokeh multi_polygons representation. Multi-polygons split by nans are expanded and the correct list of holes is assigned to each sub-polygon. + """ xs, ys = (element.dimension_values(kd, expanded=False) for kd in element.kdims) holes = element.holes() @@ -1077,6 +1091,7 @@ def match_dim_specs(specs1, specs2): to each dimension, each tuple spec has the form (name, label, unit). The name and label must match exactly while the unit only has to match if both specs define one. + """ if (specs1 is None or specs2 is None) or (len(specs1) != len(specs2)): return False @@ -1131,8 +1146,8 @@ def get_axis_class(axis_type, range_input, dim): # Copied from bokeh def match_ax_type(ax, range_type): - """ - Ensure the range_type matches the axis model being matched. + """Ensure the range_type matches the axis model being matched. + """ if isinstance(ax, CategoricalAxis): return range_type == 'categorical' @@ -1143,7 +1158,9 @@ def match_ax_type(ax, range_type): def match_yaxis_type_to_range(yax, range_type, range_name): - "Apply match_ax_type to the y-axis found by the given range name " + """Apply match_ax_type to the y-axis found by the given range name + + """ for axis in yax: if axis.y_range_name == range_name: return match_ax_type(axis, range_type) @@ -1151,9 +1168,9 @@ def match_yaxis_type_to_range(yax, range_type, range_name): def wrap_formatter(formatter, axis): - """ - Wraps formatting function or string in + """Wraps formatting function or string in appropriate bokeh formatter type. + """ if isinstance(formatter, TickFormatter): pass @@ -1163,10 +1180,9 @@ def wrap_formatter(formatter, axis): def property_to_dict(x): - """ - Convert Bokeh's property Field and Value to a dictionary - """ + """Convert Bokeh's property Field and Value to a dictionary + """ try: from bokeh.core.property.vectorization import Field, Unspecified, Value diff --git a/holoviews/plotting/links.py b/holoviews/plotting/links.py index 25289f0200..2b79141811 100644 --- a/holoviews/plotting/links.py +++ b/holoviews/plotting/links.py @@ -7,8 +7,7 @@ class Link(param.Parameterized): - """ - A Link defines some connection between a source and target object + """A Link defines some connection between a source and target object in their visualization. It is quite similar to a Stream as it allows defining callbacks in response to some change or event on the source object, however, unlike a Stream, it does not transfer @@ -21,6 +20,7 @@ class Link(param.Parameterized): A Link must define a source object which is what triggers events, but must not define a target. It is also possible to define bi- directional links between the source and target object. + """ # Mapping from a source id to a Link instance @@ -47,9 +47,9 @@ def __init__(self, source, target=None, **params): @classmethod def register_callback(cls, backend, callback): - """ - Register a LinkCallback providing the implementation for + """Register a LinkCallback providing the implementation for the Link for a particular backend. 
+ """ cls._callbacks[backend][cls] = callback @@ -62,8 +62,8 @@ def target(self): return self._target() if self._target else None def link(self): - """ - Registers the Link + """Registers the Link + """ if self.source in self.registry: links = self.registry[self.source] @@ -80,8 +80,8 @@ def link(self): self.registry[self.source] = [self] def unlink(self): - """ - Unregisters the Link + """Unregisters the Link + """ links = self.registry.get(self.source) if self in links: @@ -89,15 +89,15 @@ def unlink(self): class RangeToolLink(Link): - """ - The RangeToolLink sets up a link between a RangeTool on the source + """The RangeToolLink sets up a link between a RangeTool on the source plot and the axes on the target plot. It is useful for exploring a subset of a larger dataset in more detail. By default it will link along the x-axis but using the axes parameter both axes may be linked to the tool. Example of how to use RangeToolLink can be found here: - https://www.holoviews.org/gallery/demos/bokeh/timeseries_range_tool.html + https ://www.holoviews.org/gallery/demos/bokeh/timeseries_range_tool.html + """ axes = param.ListSelector(default=['x'], objects=['x', 'y'], doc=""" @@ -128,27 +128,27 @@ class RangeToolLink(Link): _requires_target = True class DataLink(Link): - """ - DataLink defines a link in the data between two objects allowing + """DataLink defines a link in the data between two objects allowing them to be selected together. In order for a DataLink to be established the source and target data must be of the same length. + """ _requires_target = True class SelectionLink(Link): - """ - Links the selection between two glyph renderers. + """Links the selection between two glyph renderers. + """ _requires_target = True class VertexTableLink(Link): - """ - Defines a Link between a Path type and a Table that will + """Defines a Link between a Path type and a Table that will display the vertices of selected path. + """ vertex_columns = param.List(default=[]) @@ -163,8 +163,8 @@ def __init__(self, source, target, **params): class RectanglesTableLink(Link): - """ - Links a Rectangles element to a Table. + """Links a Rectangles element to a Table. + """ _requires_target = True diff --git a/holoviews/plotting/mixins.py b/holoviews/plotting/mixins.py index 8da7c7bf7a..fa11f5ddac 100644 --- a/holoviews/plotting/mixins.py +++ b/holoviews/plotting/mixins.py @@ -9,9 +9,9 @@ class GeomMixin: def get_extents(self, element, ranges, range_type='combined', **kwargs): - """ - Use first two key dimensions to set names, and all four + """Use first two key dimensions to set names, and all four to set the data range. + """ kdims = element.kdims # loop over start and end points of segments @@ -39,8 +39,8 @@ def get_extents(self, element, ranges, range_type='combined', **kwargs): class ChordMixin: def get_extents(self, element, ranges, range_type='combined', **kwargs): - """ - A Chord plot is always drawn on a unit circle. + """A Chord plot is always drawn on a unit circle. + """ xdim, ydim = element.nodes.kdims[:2] if range_type not in ('combined', 'data', 'extents'): @@ -143,10 +143,10 @@ def _get_axis_dims(self, element): return (xdims, element.vdims[0]) def get_extents(self, element, ranges, range_type='combined', **kwargs): - """ - Make adjustments to plot extents by computing + """Make adjustments to plot extents by computing stacked bar heights, adjusting the bar baseline and forcing the x-axis to be categorical. 
+ """ if self.batched: overlay = self.current_frame @@ -185,8 +185,8 @@ def get_extents(self, element, ranges, range_type='combined', **kwargs): return (x0, y0, x1, y1) def _get_coords(self, element, ranges, as_string=True): - """ - Get factors for categorical axes. + """Get factors for categorical axes. + """ gdim = None sdim = None diff --git a/holoviews/plotting/mpl/__init__.py b/holoviews/plotting/mpl/__init__.py index d9a386023f..778b911cb2 100644 --- a/holoviews/plotting/mpl/__init__.py +++ b/holoviews/plotting/mpl/__init__.py @@ -37,9 +37,9 @@ def set_style(key): - """ - Select a style by name, e.g. set_style('default'). To revert to the + """Select a style by name, e.g. set_style('default'). To revert to the previous style use the key 'unset' or False. + """ if key is None: return diff --git a/holoviews/plotting/mpl/annotation.py b/holoviews/plotting/mpl/annotation.py index 75783c4ac0..1551e2bc85 100644 --- a/holoviews/plotting/mpl/annotation.py +++ b/holoviews/plotting/mpl/annotation.py @@ -13,10 +13,9 @@ class ABLine2D(Line2D): - - """ - Draw a line based on its slope and y-intercept. Additional arguments are + """Draw a line based on its slope and y-intercept. Additional arguments are passed to the constructor. + """ def __init__(self, slope, intercept, *args, **kwargs): @@ -37,7 +36,9 @@ def __init__(self, slope, intercept, *args, **kwargs): self.axes.callbacks.connect('ylim_changed', self._update_lim) def _update_lim(self, event): - """ called whenever axis x/y limits change """ + """Called whenever axis x/y limits change + + """ x = np.array(self.axes.get_xbound()) y = (self._slope * x) + self._intercept self.set_data(x, y) @@ -45,8 +46,8 @@ def _update_lim(self, event): class AnnotationPlot(ElementPlot): - """ - AnnotationPlot handles the display of all annotation elements. + """AnnotationPlot handles the display of all annotation elements. 
+ """ show_legend = param.Boolean(default=False, doc=""" @@ -80,7 +81,9 @@ def update_handles(self, key, axis, annotation, ranges, style): class VLinePlot(AnnotationPlot): - "Draw a vertical line on the axis" + """Draw a vertical line on the axis + + """ style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible'] @@ -92,12 +95,16 @@ def draw_annotation(self, axis, position, opts): class HLinePlot(AnnotationPlot): - "Draw a horizontal line on the axis" + """Draw a horizontal line on the axis + + """ style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, position, opts): - "Draw a horizontal line on the axis" + """Draw a horizontal line on the axis + + """ if self.invert_axes: return [axis.axvline(position, **opts)] else: @@ -105,13 +112,17 @@ def draw_annotation(self, axis, position, opts): class VSpanPlot(AnnotationPlot): - "Draw a vertical span on the axis" + """Draw a vertical span on the axis + + """ style_opts = ['alpha', 'color', 'facecolor', 'edgecolor', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, positions, opts): - "Draw a vertical span on the axis" + """Draw a vertical span on the axis + + """ if self.invert_axes: return [axis.axhspan(*positions, **opts)] else: @@ -119,13 +130,17 @@ def draw_annotation(self, axis, positions, opts): class HSpanPlot(AnnotationPlot): - "Draw a horizontal span on the axis" + """Draw a horizontal span on the axis + + """ style_opts = ['alpha', 'color', 'facecolor', 'edgecolor', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, positions, opts): - "Draw a horizontal span on the axis" + """Draw a horizontal span on the axis + + """ if self.invert_axes: return [axis.axvspan(*positions, **opts)] else: @@ -137,7 +152,9 @@ class SlopePlot(AnnotationPlot): style_opts = ['alpha', 'color', 'linewidth', 'linestyle', 'visible'] def draw_annotation(self, axis, position, opts): - "Draw a horizontal line on the axis" + """Draw a horizontal line on the axis + + """ gradient, intercept = position if self.invert_axes: if gradient == 0: @@ -149,7 +166,9 @@ def draw_annotation(self, axis, position, opts): class TextPlot(AnnotationPlot): - "Draw the Text annotation object" + """Draw the Text annotation object + + """ style_opts = ['alpha', 'color', 'family', 'weight', 'visible'] @@ -242,7 +261,9 @@ def teardown_handles(self): class ArrowPlot(AnnotationPlot): - "Draw an arrow using the information supplied to the Arrow annotation" + """Draw an arrow using the information supplied to the Arrow annotation + + """ _arrow_style_opts = ['alpha', 'color', 'lw', 'linewidth', 'visible'] _text_style_opts = [*TextPlot.style_opts, 'textsize', 'fontsize'] @@ -272,7 +293,9 @@ def draw_annotation(self, axis, data, opts): class SplinePlot(AnnotationPlot): - "Draw the supplied Spline annotation (see Spline docstring)" + """Draw the supplied Spline annotation (see Spline docstring) + + """ style_opts = ['alpha', 'edgecolor', 'linewidth', 'linestyle', 'visible'] diff --git a/holoviews/plotting/mpl/chart.py b/holoviews/plotting/mpl/chart.py index 7578bdcbff..915ee6cff7 100644 --- a/holoviews/plotting/mpl/chart.py +++ b/holoviews/plotting/mpl/chart.py @@ -29,20 +29,20 @@ class ChartPlot(ElementPlot): - """ - Baseclass to plot Chart elements. + """Baseclass to plot Chart elements. + """ class CurvePlot(ChartPlot): - """ - CurvePlot can plot Curve and ViewMaps of Curve, which can be + """CurvePlot can plot Curve and ViewMaps of Curve, which can be displayed as a single frame or animation. 
Axes, titles and legends are automatically generated from dim_info. If the dimension is set to cyclic in the dim_info it will rotate the curve so that minimum y values are at the minimum x value to make the plots easier to interpret. + """ autotick = param.Boolean(default=False, doc=""" @@ -111,10 +111,10 @@ def update_handles(self, key, axis, element, ranges, style): class ErrorPlot(ColorbarPlot): - """ - ErrorPlot plots the ErrorBar Element type and supporting + """ErrorPlot plots the ErrorBar Element type and supporting both horizontal and vertical error bars via the 'horizontal' plot option. + """ style_opts = ['edgecolor', 'elinewidth', 'capsize', 'capthick', @@ -259,8 +259,8 @@ class SideAreaPlot(AdjoinedPlot, AreaPlot): class SpreadPlot(AreaPlot): - """ - SpreadPlot plots the Spread Element type. + """SpreadPlot plots the Spread Element type. + """ padding = param.ClassSelector(default=(0, 0.1), class_=(int, float, tuple)) @@ -286,10 +286,10 @@ def get_extents(self, element, ranges, range_type='combined', **kwargs): class HistogramPlot(ColorbarPlot): - """ - HistogramPlot can plot DataHistograms and ViewMaps of + """HistogramPlot can plot DataHistograms and ViewMaps of DataHistograms, which can be displayed as a single frame or animation. + """ style_opts = ['alpha', 'color', 'align', 'visible', 'facecolor', @@ -358,8 +358,8 @@ def initialize_plot(self, ranges=None): def _process_hist(self, hist): - """ - Get data from histogram, including bin_ranges and values. + """Get data from histogram, including bin_ranges and values. + """ self.cyclic = hist.get_dimension(0).cyclic x = hist.kdims[0] @@ -377,9 +377,9 @@ def _process_hist(self, hist): return edges[:-1], hist_vals, widths, xlim+ylim, is_datetime def _compute_ticks(self, element, edges, widths, lims): - """ - Compute the ticks either as cyclic values in degrees or as roughly + """Compute the ticks either as cyclic values in degrees or as roughly evenly spaced bin centers. + """ if self.xticks is None or not isinstance(self.xticks, int): return None @@ -404,24 +404,24 @@ def get_extents(self, element, ranges, range_type='combined', **kwargs): return super().get_extents(element, ranges, range_type) def _process_axsettings(self, hist, lims, ticks): - """ - Get axis settings options including ticks, x- and y-labels + """Get axis settings options including ticks, x- and y-labels and limits. + """ axis_settings = dict(zip(self.axis_settings, [None, None, (None if self.overlaid else ticks)])) return axis_settings def _update_plot(self, key, hist, bars, lims, ranges): - """ - Process bars can be subclassed to manually adjust bars + """Process bars can be subclassed to manually adjust bars after being plotted. + """ return bars def _update_artists(self, key, hist, edges, hvals, widths, lims, ranges): - """ - Update all the artists in the histogram. Subclassable to + """Update all the artists in the histogram. Subclassable to allow updating of further artists. + """ plot_vals = zip(self.handles['artist'], edges, hvals, widths) for bar, edge, height, width in plot_vals: @@ -457,8 +457,8 @@ class SideHistogramPlot(AdjoinedPlot, HistogramPlot): Whether to overlay a grid on the axis.""") def _process_hist(self, hist): - """ - Subclassed to offset histogram by defined amount. + """Subclassed to offset histogram by defined amount. 
+ """ edges, hvals, widths, lims, isdatetime = super()._process_hist(hist) offset = self.offset * lims[3] @@ -472,11 +472,11 @@ def _update_artists(self, n, element, edges, hvals, widths, lims, ranges): self._update_plot(n, element, self.handles['artist'], lims, ranges) def _update_plot(self, key, element, bars, lims, ranges): - """ - Process the bars and draw the offset line as necessary. If a + """Process the bars and draw the offset line as necessary. If a color map is set in the style of the 'main' ViewableElement object, color the bars appropriately, respecting the required normalization settings. + """ main = self.adjoined.main _, y1 = element.range(1) @@ -524,9 +524,9 @@ def _update_plot(self, key, element, bars, lims, ranges): return bars def _colorize_bars(self, cmap, bars, element, main_range, dim): - """ - Use the given cmap to color the bars, applying the correct + """Use the given cmap to color the bars, applying the correct color ranges as necessary. + """ cmap_range = main_range[1] - main_range[0] lower_bound = main_range[0] @@ -537,9 +537,9 @@ def _colorize_bars(self, cmap, bars, element, main_range, dim): bar.set_clip_on(False) def _update_separator(self, offset): - """ - Compute colorbar offset and update separator line + """Compute colorbar offset and update separator line if map is non-zero. + """ offset_line = self.handles['offset_line'] if offset == 0: @@ -553,9 +553,9 @@ def _update_separator(self, offset): class PointPlot(ChartPlot, ColorbarPlot, LegendPlot): - """ - Note that the 'cmap', 'vmin' and 'vmax' style arguments control + """Note that the 'cmap', 'vmin' and 'vmax' style arguments control how point magnitudes are rendered to different colors. + """ show_grid = param.Boolean(default=False, doc=""" @@ -678,8 +678,7 @@ def update_handles(self, key, axis, element, ranges, style): class VectorFieldPlot(ColorbarPlot): - """ - Renders vector fields in sheet coordinates. The vectors are + """Renders vector fields in sheet coordinates. The vectors are expressed in polar coordinates and may be displayed according to angle alone (with some common, arbitrary arrow length) or may be true polar vectors. @@ -692,6 +691,7 @@ class VectorFieldPlot(ColorbarPlot): normalize_lengths and rescale_lengths plot option, which will normalize the lengths to a maximum of 1 and scale them according to the minimum distance respectively. + """ arrow_heads = param.Boolean(default=True, doc=""" @@ -866,8 +866,8 @@ class BarPlot(BarsMixin, ColorbarPlot, LegendPlot): ) def _get_values(self, element, ranges): - """ - Get unique index value for each bar + """Get unique index value for each bar + """ gvals, cvals = self._get_coords(element, ranges, as_string=False) kdims = element.kdims @@ -908,8 +908,8 @@ def initialize_plot(self, ranges=None): dimensions=[xdims, vdim], **kwargs) def _finalize_ticks(self, axis, element, xticks, yticks, zticks): - """ - Apply ticks with appropriate offsets. + """Apply ticks with appropriate offsets. + """ alignments = None ticks = xticks or yticks diff --git a/holoviews/plotting/mpl/chart3d.py b/holoviews/plotting/mpl/chart3d.py index bd175b94c5..800c687285 100644 --- a/holoviews/plotting/mpl/chart3d.py +++ b/holoviews/plotting/mpl/chart3d.py @@ -14,9 +14,9 @@ class Plot3D(ColorbarPlot): - """ - Plot3D provides a common baseclass for mplot3d based + """Plot3D provides a common baseclass for mplot3d based plots. 
+ """ azimuth = param.Integer(default=-60, bounds=(-180, 180), doc=""" @@ -56,9 +56,9 @@ class Plot3D(ColorbarPlot): Whether and where to display the yaxis.""") def _finalize_axis(self, key, **kwargs): - """ - Extends the ElementPlot _finalize_axis method to set appropriate + """Extends the ElementPlot _finalize_axis method to set appropriate labels, and axes options for 3D Plots. + """ axis = self.handles['axis'] self.handles['fig'].set_frameon(False) @@ -115,10 +115,10 @@ def _draw_colorbar(self, element=None, dim=None, redraw=True): class Scatter3DPlot(Plot3D, PointPlot): - """ - Subclass of PointPlot allowing plotting of Points + """Subclass of PointPlot allowing plotting of Points on a 3D axis, also allows mapping color and size onto a particular Dimension of the data. + """ color_index = param.ClassSelector(default=None, class_=(str, int), @@ -153,8 +153,8 @@ def update_handles(self, key, axis, element, ranges, style): class Path3DPlot(Plot3D, PathPlot): - """ - Allows plotting paths on a 3D axis. + """Allows plotting paths on a 3D axis. + """ style_opts = ['alpha', 'color', 'linestyle', 'linewidth', 'visible', 'cmap'] @@ -185,11 +185,11 @@ def update_handles(self, key, axis, element, ranges, style): class SurfacePlot(Plot3D): - """ - Plots surfaces wireframes and contours in 3D space. + """Plots surfaces wireframes and contours in 3D space. Provides options to switch the display type via the plot_type parameter has support for a number of styling options including strides and colors. + """ colorbar = param.Boolean(default=False, doc=""" @@ -232,9 +232,9 @@ def get_data(self, element, ranges, style): class TriSurfacePlot(Plot3D): - """ - Plots a trisurface given a TriSurface element, containing + """Plots a trisurface given a TriSurface element, containing X, Y and Z coordinates. + """ colorbar = param.Boolean(default=False, doc=""" diff --git a/holoviews/plotting/mpl/element.py b/holoviews/plotting/mpl/element.py index b9adc14c14..5c74c62038 100644 --- a/holoviews/plotting/mpl/element.py +++ b/holoviews/plotting/mpl/element.py @@ -115,12 +115,12 @@ def __init__(self, element, **params): def _finalize_axis(self, key, element=None, title=None, dimensions=None, ranges=None, xticks=None, yticks=None, zticks=None, xlabel=None, ylabel=None, zlabel=None): - """ - Applies all the axis settings before the axis or figure is returned. + """Applies all the axis settings before the axis or figure is returned. Only plots with zorder 0 get to apply their settings. When the number of the frame is supplied as n, this method looks up and computes the appropriate title, axis labels and axis bounds. + """ if element is None: element = self._get_frame(key) @@ -205,10 +205,10 @@ def _execute_hooks(self, element): self._update_backend_opts() def _finalize_ticks(self, axis, dimensions, xticks, yticks, zticks): - """ - Finalizes the ticks on the axes based on the supplied ticks + """Finalizes the ticks on the axes based on the supplied ticks and Elements. Sets the axes position as well as tick positions, labels and fontsize. + """ ndims = len(dimensions) if dimensions else 0 xdim = dimensions[0] if ndims else None @@ -290,16 +290,16 @@ def _update_backend_opts(self): ) def _finalize_artist(self, element): - """ - Allows extending the _finalize_axis method with Element + """Allows extending the _finalize_axis method with Element specific options. + """ def _set_labels(self, axes, dimensions, xlabel=None, ylabel=None, zlabel=None): - """ - Sets the labels of the axes using the supplied list of dimensions. 
+ """Sets the labels of the axes using the supplied list of dimensions. Optionally explicit labels may be supplied to override the dimension label. + """ xlabel, ylabel, zlabel = self._get_axis_labels(dimensions, xlabel, ylabel, zlabel) if self.invert_axes: @@ -313,8 +313,8 @@ def _set_labels(self, axes, dimensions, xlabel=None, ylabel=None, zlabel=None): def _set_axis_formatter(self, axis, dim, formatter): - """ - Set axis formatter based on dimension formatter. + """Set axis formatter based on dimension formatter. + """ if isinstance(dim, list): dim = dim[0] if formatter is not None or dim is None: @@ -328,8 +328,8 @@ def _set_axis_formatter(self, axis, dim, formatter): def get_aspect(self, xspan, yspan): - """ - Computes the aspect ratio of the plot + """Computes the aspect ratio of the plot + """ if isinstance(self.aspect, (int, float)): return self.aspect @@ -341,8 +341,8 @@ def get_aspect(self, xspan, yspan): def _set_aspect(self, axes, aspect): - """ - Set the aspect on the axes based on the aspect setting. + """Set the aspect on the axes based on the aspect setting. + """ if isinstance(self.projection, str) and self.projection == '3d': return @@ -363,8 +363,8 @@ def _set_aspect(self, axes, aspect): def _set_axis_limits(self, axis, view, subplots, ranges): - """ - Compute extents for current view and apply as axis limits + """Compute extents for current view and apply as axis limits + """ # Extents extents = self.get_extents(view, ranges) @@ -429,13 +429,13 @@ def _compute_limits(self, low, high, log, invert, low_key, high_key): def _set_axis_position(self, axes, axis, option): - """ - Set the position and visibility of the xaxis or yaxis by + """Set the position and visibility of the xaxis or yaxis by supplying the axes object, the axis to set, i.e. 'x' or 'y' and an option to specify the position and visibility of the axis. The option may be None, 'bare' or positional, i.e. 'left' and 'right' for the yaxis and 'top' and 'bottom' for the xaxis. May also combine positional and 'bare' into for example 'left-bare'. + """ positions = {'x': ['bottom', 'top'], 'y': ['left', 'right']}[axis] axis = axes.xaxis if axis == 'x' else axes.yaxis @@ -460,13 +460,13 @@ def _set_axis_position(self, axes, axis, option): def _set_axis_ticks(self, axis, ticks, log=False, rotation=0): - """ - Allows setting the ticks for a particular axis either with + """Allows setting the ticks for a particular axis either with a tuple of ticks, a tick locator object, an integer number of ticks, a list of tuples containing positions and labels or a list of positions. Also supports enabling log ticking if an integer number of ticks is supplied and setting a rotation for the ticks. + """ if isinstance(ticks, np.ndarray): ticks = list(ticks) @@ -497,13 +497,13 @@ def _set_axis_ticks(self, axis, ticks, log=False, rotation=0): @mpl_rc_context def update_frame(self, key, ranges=None, element=None): - """ - Set the plot(s) to the given frame number. Operates by + """Set the plot(s) to the given frame number. Operates by manipulating the matplotlib objects held in the self._handles dictionary. If n is greater than the number of available frames, update using the last available frame. 
+ """ reused = isinstance(self.hmap, DynamicMap) and self.overlaid self.prev_frame = self.current_frame @@ -584,9 +584,9 @@ def initialize_plot(self, ranges=None): def init_artists(self, ax, plot_args, plot_kwargs): - """ - Initializes the artist based on the plot method declared on + """Initializes the artist based on the plot method declared on the plot. + """ plot_method = self._plot_methods.get('batched' if self.batched else 'single') plot_fn = getattr(ax, plot_method) @@ -602,8 +602,8 @@ def init_artists(self, ax, plot_args, plot_kwargs): def update_handles(self, key, axis, element, ranges, style): - """ - Update the elements of the plot. + """Update the elements of the plot. + """ self.teardown_handles() handles, axis_kwargs = self.render_artists(element, ranges, style, axis) @@ -714,10 +714,10 @@ def _apply_transforms(self, element, ranges, style): def teardown_handles(self): - """ - If no custom update_handles method is supplied this method + """If no custom update_handles method is supplied this method is called to tear down any previous handles before replacing them. + """ if 'artist' in self.handles: self.handles['artist'].remove() @@ -901,9 +901,9 @@ def _draw_colorbar(self, element=None, dimension=None, redraw=True): def _norm_kwargs(self, element, ranges, opts, vdim, values=None, prefix=''): - """ - Returns valid color normalization kwargs + """Returns valid color normalization kwargs to be passed to matplotlib plot function. + """ dim_name = dim_range_key(vdim) if values is None: @@ -1109,8 +1109,8 @@ def _legend_opts(self): class OverlayPlot(LegendPlot, GenericOverlayPlot): - """ - OverlayPlot supports compositors processing of Overlays across maps. + """OverlayPlot supports compositors processing of Overlays across maps. + """ _passed_handles = ['fig', 'axis'] @@ -1135,9 +1135,9 @@ def _finalize_artist(self, element): subplot._finalize_artist(element) def _adjust_legend(self, overlay, axis): - """ - Accumulate the legend handles and labels for all subplots + """Accumulate the legend handles and labels for all subplots and set up the legend + """ legend_data = [] legend_plot = True diff --git a/holoviews/plotting/mpl/geometry.py b/holoviews/plotting/mpl/geometry.py index 9620cd75e0..144d6934f0 100644 --- a/holoviews/plotting/mpl/geometry.py +++ b/holoviews/plotting/mpl/geometry.py @@ -9,10 +9,11 @@ class SegmentPlot(GeomMixin, ColorbarPlot): - """ - Segments are lines in 2D space where each two key dimensions specify a + """Segments are lines in 2D space where each two key dimensions specify a (x, y) node of the line. + """ + style_opts = [*PathPlot.style_opts, 'cmap'] _nonvectorized_styles = ['cmap'] @@ -42,9 +43,9 @@ def get_data(self, element, ranges, style): class RectanglesPlot(GeomMixin, ColorbarPlot): - """ - Rectangles are polygons in 2D space where the key dimensions represent + """Rectangles are polygons in 2D space where the key dimensions represent the bottom-left and top-right corner of the rectangle. + """ style_opts = PolygonPlot.style_opts diff --git a/holoviews/plotting/mpl/heatmap.py b/holoviews/plotting/mpl/heatmap.py index 681e0a2949..6b26521fe0 100644 --- a/holoviews/plotting/mpl/heatmap.py +++ b/holoviews/plotting/mpl/heatmap.py @@ -252,6 +252,7 @@ class RadialHeatMapPlot(ColorbarPlot): def _map_order_to_ticks(start, end, order, reverse=False): """Map elements from given `order` array to bins ranging from `start` to `end`. 
+ """ size = len(order) bounds = np.linspace(start, end, size + 1) diff --git a/holoviews/plotting/mpl/path.py b/holoviews/plotting/mpl/path.py index 5685d374db..8ded220de2 100644 --- a/holoviews/plotting/mpl/path.py +++ b/holoviews/plotting/mpl/path.py @@ -160,12 +160,12 @@ def get_data(self, element, ranges, style): class PolygonPlot(ContourPlot): - """ - PolygonPlot draws the polygon paths in the supplied Polygons + """PolygonPlot draws the polygon paths in the supplied Polygons object. If the Polygon has an associated value the color of Polygons will be drawn from the supplied cmap, otherwise the supplied facecolor will apply. Facecolor also determines the color for non-finite values. + """ show_legend = param.Boolean(default=False, doc=""" diff --git a/holoviews/plotting/mpl/plot.py b/holoviews/plotting/mpl/plot.py index 25fc44cc33..fb95272915 100644 --- a/holoviews/plotting/mpl/plot.py +++ b/holoviews/plotting/mpl/plot.py @@ -40,8 +40,8 @@ @contextmanager def _rc_context(rcparams): - """ - Context manager that temporarily overrides the pyplot rcParams. + """Context manager that temporarily overrides the pyplot rcParams. + """ old_rcparams = get_old_rcparams() mpl.rcParams.clear() @@ -53,9 +53,9 @@ def _rc_context(rcparams): mpl.rcParams.update(old_rcparams) def mpl_rc_context(f): - """ - Decorator for MPLPlot methods applying the matplotlib rc params + """Decorator for MPLPlot methods applying the matplotlib rc params in the plots fig_rcparams while when method is called. + """ def wrapper(self, *args, **kwargs): with _rc_context(self.fig_rcparams): @@ -64,13 +64,13 @@ def wrapper(self, *args, **kwargs): class MPLPlot(DimensionedPlot): - """ - An MPLPlot object draws a matplotlib figure object when called or + """An MPLPlot object draws a matplotlib figure object when called or indexed but can also return a matplotlib animation object as appropriate. MPLPlots take element objects such as Image, Contours or Points as inputs and plots them in the appropriate format using matplotlib. As HoloMaps are supported, all plots support animation via the anim() method. + """ backend = 'matplotlib' @@ -171,9 +171,9 @@ def __init__(self, fig=None, axis=None, **params): @mpl_rc_context def _init_axis(self, fig, axis): - """ - Return an axis which may need to be initialized from + """Return an axis which may need to be initialized from a new figure. + """ if not fig and self._create_fig: fig = plt.figure() @@ -249,8 +249,8 @@ def _subplot_label(self, axis): def _finalize_axis(self, key): - """ - General method to finalize the axis and plot. + """General method to finalize the axis and plot. + """ if 'title' in self.handles: self.handles['title'].set_visible(self.show_title) @@ -270,9 +270,9 @@ def state(self): return self.handles['fig'] def anim(self, start=0, stop=None, fps=30): - """ - Method to return a matplotlib animation. The start and stop + """Method to return a matplotlib animation. The start and stop frames may be specified as well as the fps. + """ figure = self.state or self.initialize_plot() anim = animation.FuncAnimation(figure, self.update_frame, @@ -291,9 +291,9 @@ def update(self, key): class CompositePlot(GenericCompositePlot, MPLPlot): - """ - CompositePlot provides a baseclass for plots coordinate multiple + """CompositePlot provides a baseclass for plots coordinate multiple subplots to form a Layout. 
+ """ shared_axes = param.Boolean(default=True, doc=""" @@ -301,9 +301,9 @@ class CompositePlot(GenericCompositePlot, MPLPlot): disabled switches axiswise normalization option on globally.""") def _link_dimensioned_streams(self): - """ - Should perform any linking required to update titles when dimensioned + """Should perform any linking required to update titles when dimensioned streams change. + """ streams = [s for s in self.streams if any(k in self.dimensions for k in s.contents)] for s in streams: @@ -333,9 +333,9 @@ def update_frame(self, key, ranges=None): class GridPlot(CompositePlot): - """ - Plot a group of elements in a grid layout based on a GridSpace element + """Plot a group of elements in a grid layout based on a GridSpace element object. + """ aspect = param.Parameter(default='equal', doc=""" @@ -655,11 +655,11 @@ def _adjust_subplots(self, axis, subaxes): class AdjointLayoutPlot(MPLPlot, GenericAdjointLayoutPlot): - """ - Initially, a AdjointLayoutPlot computes an appropriate layout based for + """Initially, a AdjointLayoutPlot computes an appropriate layout based for the number of Views in the AdjointLayout object it has been given, but when embedded in a NdLayout, it can recompute the layout to match the number of rows and columns as part of a larger grid. + """ layout_dict = {'Single': ['main'], @@ -680,12 +680,12 @@ def __init__(self, layout, layout_type, subaxes, subplots, **params): @mpl_rc_context def initialize_plot(self, ranges=None): - """ - Plot all the views contained in the AdjointLayout Object using axes + """Plot all the views contained in the AdjointLayout Object using axes appropriate to the layout configuration. All the axes are supplied by LayoutPlot - the purpose of the call is to invoke subplots with correct options and styles and hide any empty axes as necessary. + """ for pos in self.view_positions: # Pos will be one of 'main', 'top' or 'right' or None @@ -702,13 +702,13 @@ def initialize_plot(self, ranges=None): self.drawn = True def adjust_positions(self, redraw=True): - """ - Make adjustments to the positions of subplots (if available) + """Make adjustments to the positions of subplots (if available) relative to the main plot axes as required. This method is called by LayoutPlot after an initial pass used to position all the Layouts together. This method allows LayoutPlots to make final adjustments to the axis positions. + """ checks = [self.view_positions, self.subaxes, self.subplots] right = all('right' in check for check in checks) @@ -760,9 +760,9 @@ def __len__(self): class LayoutPlot(GenericLayoutPlot, CompositePlot): - """ - A LayoutPlot accepts either a Layout or a NdLayout and + """A LayoutPlot accepts either a Layout or a NdLayout and displays the elements in a cartesian grid in scanline order. + """ absolute_scaling = param.Selector(default=False, doc=""" @@ -808,8 +808,7 @@ def __init__(self, layout, keys=None, **params): [GenericElementPlot]) def _compute_gridspec(self, layout): - """ - Computes the tallest and widest cell for each row and column + """Computes the tallest and widest cell for each row and column by examining the Layouts in the GridSpace. The GridSpec is then instantiated and the LayoutPlots are configured with the appropriate embedded layout_types. The first element of the @@ -817,6 +816,7 @@ def _compute_gridspec(self, layout): by row and column. The second dictionary in the tuple supplies the grid indices needed to instantiate the axes for each LayoutPlot. 
+ """ layout_items = layout.grid_items() layout_dimensions = layout.kdims if isinstance(layout, NdLayout) else None @@ -1019,8 +1019,7 @@ def _compute_gridspec(self, layout): return layout_subplots, layout_axes, collapsed_layout def grid_situate(self, current_idx, layout_type, subgrid_width): - """ - Situate the current AdjointLayoutPlot in a LayoutPlot. The + """Situate the current AdjointLayoutPlot in a LayoutPlot. The LayoutPlot specifies a layout_type into which the AdjointLayoutPlot must be embedded. This enclosing layout is guaranteed to have enough cells to display all the views. @@ -1030,6 +1029,7 @@ def grid_situate(self, current_idx, layout_type, subgrid_width): arrangement) is updated to the appropriate embedded value. It will also return a list of gridspec indices associated with the all the required layout axes. + """ # Set the layout configuration as situated in a NdLayout @@ -1052,12 +1052,12 @@ def grid_situate(self, current_idx, layout_type, subgrid_width): return start, inds def _create_subplots(self, layout, positions, layout_dimensions, ranges, axes=None, num=1, create=True): - """ - Plot all the views contained in the AdjointLayout Object using axes + """Plot all the views contained in the AdjointLayout Object using axes appropriate to the layout configuration. All the axes are supplied by LayoutPlot - the purpose of the call is to invoke subplots with correct options and styles and hide any empty axes as necessary. + """ if axes is None: axes = {} diff --git a/holoviews/plotting/mpl/raster.py b/holoviews/plotting/mpl/raster.py index 087e9d4443..08171e9875 100644 --- a/holoviews/plotting/mpl/raster.py +++ b/holoviews/plotting/mpl/raster.py @@ -206,11 +206,11 @@ def init_artists(self, ax, plot_args, plot_kwargs): class RasterGridPlot(GridPlot, OverlayPlot): - """ - RasterGridPlot evenly spaces out plots of individual projections on + """RasterGridPlot evenly spaces out plots of individual projections on a grid, even when they differ in size. Since this class uses a single axis to generate all the individual plots it is much faster than the equivalent using subplots. + """ padding = param.Number(default=0.1, doc=""" diff --git a/holoviews/plotting/mpl/renderer.py b/holoviews/plotting/mpl/renderer.py index a2e93f109f..fdef79d637 100644 --- a/holoviews/plotting/mpl/renderer.py +++ b/holoviews/plotting/mpl/renderer.py @@ -31,8 +31,7 @@ class OutputWarning(param.Parameterized):pass class MPLRenderer(Renderer): - """ - Exporter used to render data from matplotlib, either to a stream + """Exporter used to render data from matplotlib, either to a stream or directly to file. The __call__ method renders an HoloViews component to raw data of @@ -42,7 +41,9 @@ class MPLRenderer(Renderer): The save_fig and save_anim methods are used to save matplotlib figure and animation objects. These match the two primary return types of plotting class implemented with matplotlib. + """ + drawn = {} backend = param.String('matplotlib', doc="The backend name.") @@ -75,9 +76,9 @@ class MPLRenderer(Renderer): counter = 0 def show(self, obj): - """ - Renders the supplied object and displays it using the active + """Renders the supplied object and displays it using the active GUI backend. 
+ """ if self.interactive: if isinstance(obj, list): @@ -101,8 +102,7 @@ def show(self, obj): @classmethod def plot_options(cls, obj, percent_size): - """ - Given a holoviews object and a percentage size, apply heuristics + """Given a holoviews object and a percentage size, apply heuristics to compute a suitable figure size. For instance, scaling layouts and grids linearly can result in unwieldy figure sizes when there are a large number of elements. As ad hoc heuristics are used, @@ -112,6 +112,7 @@ def plot_options(cls, obj, percent_size): Used by the IPython Notebook display hooks and the save utility. Note that this can be overridden explicitly per object using the fig_size and size plot options. + """ from .plot import MPLPlot factor = percent_size / 100.0 @@ -131,13 +132,13 @@ def get_size(self_or_cls, plot): def _figure_data(self, plot, fmt, bbox_inches='tight', as_script=False, **kwargs): - """ - Render matplotlib figure object and return the corresponding + """Render matplotlib figure object and return the corresponding data. If as_script is True, the content will be split in an HTML and a JS component. Similar to IPython.core.pylabtools.print_figure but without any IPython dependency. + """ if fmt in ['gif', 'mp4', 'webm']: with mpl.rc_context(rc=plot.fig_rcparams): @@ -183,8 +184,8 @@ def _figure_data(self, plot, fmt, bbox_inches='tight', as_script=False, **kwargs def _anim_data(self, anim, fmt): - """ - Render a matplotlib animation object and return the corresponding data. + """Render a matplotlib animation object and return the corresponding data. + """ (writer, _, anim_kwargs, extra_args) = ANIMATION_OPTS[fmt] if extra_args != []: @@ -203,14 +204,14 @@ def _anim_data(self, anim, fmt): def _compute_bbox(self, fig, kw): - """ - Compute the tight bounding box for each figure once, reducing + """Compute the tight bounding box for each figure once, reducing number of required canvas draw calls from N*2 to N+1 as a function of the number of frames. Tight bounding box computing code here mirrors: matplotlib.backend_bases.FigureCanvasBase.print_figure as it hasn't been factored out as a function. + """ fig_id = id(fig) if kw['bbox_inches'] == 'tight': @@ -240,8 +241,8 @@ def state(cls): @classmethod def load_nb(cls, inline=True): - """ - Initialize matplotlib backend + """Initialize matplotlib backend + """ import matplotlib.pyplot as plt backend = plt.get_backend() diff --git a/holoviews/plotting/mpl/sankey.py b/holoviews/plotting/mpl/sankey.py index f452fb7c53..64c5288719 100644 --- a/holoviews/plotting/mpl/sankey.py +++ b/holoviews/plotting/mpl/sankey.py @@ -47,8 +47,8 @@ class SankeyPlot(GraphPlot): style_opts = [*GraphPlot.style_opts, 'label_text_font_size'] def get_extents(self, element, ranges, range_type='combined', **kwargs): - """ - A Chord plot is always drawn on a unit circle. + """A Chord plot is always drawn on a unit circle. + """ if range_type == 'extents': return element.nodes.extents diff --git a/holoviews/plotting/mpl/stats.py b/holoviews/plotting/mpl/stats.py index 71330d58f3..3f307794b5 100644 --- a/holoviews/plotting/mpl/stats.py +++ b/holoviews/plotting/mpl/stats.py @@ -10,8 +10,8 @@ class DistributionPlot(AreaPlot): - """ - DistributionPlot visualizes a distribution of values as a KDE. + """DistributionPlot visualizes a distribution of values as a KDE. 
+ """ bandwidth = param.Number(default=None, doc=""" @@ -25,11 +25,11 @@ class DistributionPlot(AreaPlot): class BivariatePlot(PolygonPlot): - """ - Bivariate plot visualizes two-dimensional kernel density + """Bivariate plot visualizes two-dimensional kernel density estimates. Additionally, by enabling the joint option, the marginals distributions can be plotted alongside each axis (does not animate or compose). + """ bandwidth = param.Number(default=None, doc=""" @@ -46,10 +46,10 @@ class BivariatePlot(PolygonPlot): class BoxPlot(MultiDistributionMixin, ChartPlot): - """ - BoxPlot plots the ErrorBar Element type and supporting + """BoxPlot plots the ErrorBar Element type and supporting both horizontal and vertical error bars via the 'horizontal' plot option. + """ style_opts = ['notch', 'sym', 'whis', 'bootstrap', @@ -131,10 +131,10 @@ def __init__(self, *args, **kwargs): class ViolinPlot(BoxPlot): - """ - BoxPlot plots the ErrorBar Element type and supporting + """BoxPlot plots the ErrorBar Element type and supporting both horizontal and vertical error bars via the 'horizontal' plot option. + """ bandwidth = param.Number(default=None, doc=""" diff --git a/holoviews/plotting/mpl/tabular.py b/holoviews/plotting/mpl/tabular.py index e779c5c6e7..1849e805d3 100644 --- a/holoviews/plotting/mpl/tabular.py +++ b/holoviews/plotting/mpl/tabular.py @@ -9,10 +9,10 @@ class TablePlot(ElementPlot): - """ - A TablePlot can plot both TableViews and ViewMaps which display + """A TablePlot can plot both TableViews and ViewMaps which display as either a single static table or as an animated table respectively. + """ border = param.Number(default=0.05, bounds=(0.0, 0.5), doc=""" diff --git a/holoviews/plotting/mpl/util.py b/holoviews/plotting/mpl/util.py index 9cf0bb0a0a..70e6e2f176 100644 --- a/holoviews/plotting/mpl/util.py +++ b/holoviews/plotting/mpl/util.py @@ -43,8 +43,8 @@ def is_color(color): - """ - Checks if supplied object is a valid color spec. + """Checks if supplied object is a valid color spec. + """ if not isinstance(color, str): return False @@ -101,21 +101,20 @@ def get_validator(style): def validate(style, value, vectorized=True): - """ - Validates a style and associated value. + """Validates a style and associated value. - Arguments - --------- - style: str + Parameters + ---------- + style : str The style to validate (e.g. 'color', 'size' or 'marker') - value: + value : The style value to validate - vectorized: bool + vectorized : bool Whether validator should allow vectorized setting Returns ------- - valid: boolean or None + valid : boolean or None If validation is supported returns boolean, otherwise None """ validator = get_validator(style) @@ -131,25 +130,24 @@ def validate(style, value, vectorized=True): def filter_styles(style, group, other_groups, blacklist=None): - """ - Filters styles which are specific to a particular artist, e.g. + """Filters styles which are specific to a particular artist, e.g. for a GraphPlot this will filter options specific to the nodes and edges. 
- Arguments - --------- - style: dict + Parameters + ---------- + style : dict Dictionary of styles and values - group: str + group : str Group within the styles to filter for - other_groups: list + other_groups : list Other groups to filter out - blacklist: list (optional) + blacklist : list, optional List of options to filter out Returns ------- - filtered: dict + filtered : dict Filtered dictionary of styles """ if blacklist is None: @@ -169,9 +167,9 @@ def filter_styles(style, group, other_groups, blacklist=None): def wrap_formatter(formatter): - """ - Wraps formatting function or string in + """Wraps formatting function or string in appropriate matplotlib formatter type. + """ if isinstance(formatter, ticker.Formatter): return formatter @@ -221,8 +219,8 @@ def compute_ratios(ratios, normalized=True): def axis_overlap(ax1, ax2): - """ - Tests whether two axes overlap vertically + """Tests whether two axes overlap vertically + """ b1, t1 = ax1.get_position().intervaly b2, t2 = ax2.get_position().intervaly @@ -230,10 +228,10 @@ def axis_overlap(ax1, ax2): def resolve_rows(rows): - """ - Recursively iterate over lists of axes merging + """Recursively iterate over lists of axes merging them by their vertical overlap leaving a list of rows. + """ merged_rows = [] for row in rows: @@ -254,9 +252,9 @@ def resolve_rows(rows): def fix_aspect(fig, nrows, ncols, title=None, extra_artists=None, vspace=0.2, hspace=0.2): - """ - Calculate heights and widths of axes and adjust + """Calculate heights and widths of axes and adjust the size of the figure to match the aspect. + """ if extra_artists is None: extra_artists = [] @@ -295,8 +293,8 @@ def fix_aspect(fig, nrows, ncols, title=None, extra_artists=None, def get_tight_bbox(fig, bbox_extra_artists=None, pad=None): - """ - Compute a tight bounding box around all the artists in the figure. + """Compute a tight bounding box around all the artists in the figure. + """ if bbox_extra_artists is None: bbox_extra_artists = [] @@ -333,8 +331,8 @@ def get_tight_bbox(fig, bbox_extra_artists=None, pad=None): def get_raster_array(image): - """ - Return the array data from any Raster or Image type + """Return the array data from any Raster or Image type + """ if isinstance(image, RGB): rgb = image.rgb @@ -350,9 +348,9 @@ def get_raster_array(image): def ring_coding(array): - """ - Produces matplotlib Path codes for exterior and interior rings + """Produces matplotlib Path codes for exterior and interior rings of a polygon geometry. + """ # The codes will be all "LINETO" commands, except for "MOVETO"s at the # beginning of each subpath @@ -364,10 +362,10 @@ def ring_coding(array): def polygons_to_path_patches(element): - """ - Converts Polygons into list of lists of matplotlib.patches.PathPatch + """Converts Polygons into list of lists of matplotlib.patches.PathPatch objects including any specified holes. Each list represents one (multi-)polygon. + """ paths = element.split(datatype='array', dimensions=element.kdims) has_holes = isinstance(element, Polygons) and element.interface.has_holes(element) @@ -396,8 +394,8 @@ def polygons_to_path_patches(element): class CFTimeConverter(NetCDFTimeConverter): - """ - Defines conversions for cftime types by extending nc_time_axis. + """Defines conversions for cftime types by extending nc_time_axis. 
+ """ @classmethod diff --git a/holoviews/plotting/plot.py b/holoviews/plotting/plot.py index 8f4e667719..d11a00d21b 100644 --- a/holoviews/plotting/plot.py +++ b/holoviews/plotting/plot.py @@ -1,7 +1,7 @@ -""" -Public API for all plots supported by HoloViews, regardless of +"""Public API for all plots supported by HoloViews, regardless of plotting package or backend. Every plotting classes must be a subclass of this Plot baseclass. + """ import uuid import warnings @@ -50,9 +50,9 @@ class Plot(param.Parameterized): - """ - Base class of all Plot classes in HoloViews, designed to be + """Base class of all Plot classes in HoloViews, designed to be general enough to use any plotting package or backend. + """ backend = None @@ -81,16 +81,16 @@ def __init__(self, renderer=None, root=None, **params): @property def state(self): - """ - The plotting state that gets updated via the update method and + """The plotting state that gets updated via the update method and used by the renderer to generate output. + """ raise NotImplementedError def set_root(self, root): - """ - Sets the root model on all subplots. + """Sets the root model on all subplots. + """ if root is None: return @@ -173,25 +173,25 @@ def comm(self, comm): def initialize_plot(self, ranges=None): - """ - Initialize the matplotlib figure. + """Initialize the matplotlib figure. + """ raise NotImplementedError def update(self, key): - """ - Update the internal state of the Plot to represent the given + """Update the internal state of the Plot to represent the given key tuple (where integers represent frames). Returns this state. + """ return self.state def cleanup(self): - """ - Cleans up references to the plot on the attached Stream + """Cleans up references to the plot on the attached Stream subscribers. + """ plots = self.traverse(lambda x: x, [Plot]) for plot in plots: @@ -208,9 +208,9 @@ def _session_destroy(self, session_context): self.cleanup() def refresh(self, **kwargs): - """ - Refreshes the plot by rerendering it and then pushing + """Refreshes the plot by rerendering it and then pushing the updated data if the plot has an associated Comm. + """ if self.renderer.mode == 'server' and not state._unblocked(self.document): # If we do not have the Document lock, schedule refresh as callback @@ -250,15 +250,17 @@ def refresh(self, **kwargs): def _trigger_refresh(self, key): - "Triggers update to a plot on a refresh event" + """Triggers update to a plot on a refresh event + + """ # Update if not top-level, batched or an ElementPlot if not self.top_level or isinstance(self, GenericElementPlot): with unlocked(): self.update(key) def push(self): - """ - Pushes plot updates to the frontend. + """Pushes plot updates to the frontend. + """ root = self._root if (root and self.pane is not None and @@ -278,8 +280,8 @@ def id(self): return self.comm.id if self.comm else id(self.state) def __len__(self): - """ - Returns the total number of available frames. + """Returns the total number of available frames. + """ raise NotImplementedError @@ -290,18 +292,18 @@ def lookup_options(cls, obj, group): class PlotSelector: - """ - Proxy that allows dynamic selection of a plotting class based on a + """Proxy that allows dynamic selection of a plotting class based on a function of the plotted object. Behaves like a Plot class and presents the same parameterized interface. 
+ """ _disabled_opts = [] def __init__(self, selector, plot_classes, allow_mismatch=False): - """ - The selector function accepts a component instance and returns + """The selector function accepts a component instance and returns the appropriate key to index plot_classes dictionary. + """ self.selector = selector self.plot_classes = dict(plot_classes) @@ -363,10 +365,10 @@ def param(self): class DimensionedPlot(Plot): - """ - DimensionedPlot implements a number of useful methods + """DimensionedPlot implements a number of useful methods to compute dimension ranges and titles containing the dimension values. + """ fontsize = param.Parameter(default=None, allow_None=True, doc=""" @@ -433,8 +435,8 @@ def __init__(self, keys=None, dimensions=None, layout_dimensions=None, super().__init__(**params) def __getitem__(self, frame): - """ - Get the state of the Plot for a given frame number. + """Get the state of the Plot for a given frame number. + """ if isinstance(frame, int) and frame > len(self): self.param.warning(f"Showing last frame available: {len(self)}") @@ -445,14 +447,14 @@ def __getitem__(self, frame): return self.state def _get_frame(self, key): - """ - Required on each MPLPlot type to get the data corresponding + """Required on each MPLPlot type to get the data corresponding just to the current frame out from the object. + """ def matches(self, spec): - """ - Matches a specification against the current Plot. + """Matches a specification against the current Plot. + """ if callable(spec) and not isinstance(spec, type): return spec(self) elif isinstance(spec, type): return isinstance(self, spec) @@ -461,11 +463,11 @@ def matches(self, spec): def traverse(self, fn=None, specs=None, full_breadth=True): - """ - Traverses any nested DimensionedPlot returning a list + """Traverses any nested DimensionedPlot returning a list of all plots that match the specs. The specs should be supplied as a list of either Plot types or callables, which should return a boolean given the plot class. + """ accumulator = [] matches = specs is None @@ -486,9 +488,9 @@ def traverse(self, fn=None, specs=None, full_breadth=True): return accumulator def _frame_title(self, key, group_size=2, separator='\n'): - """ - Returns the formatted dimension group strings + """Returns the formatted dimension group strings for a particular frame. + """ if self.layout_dimensions is not None: dimensions, key = zip(*self.layout_dimensions.items()) @@ -516,18 +518,17 @@ def _format_title(self, key, dimensions=True, separator='\n'): return title.strip(' \n') def _format_title_components(self, key, dimensions=True, separator='\n'): - """ - Determine components of title as used by _format_title method. + """Determine components of title as used by _format_title method. To be overridden in child classes. Return signature: (label, group, type_name, dim_title) + """ return (self.label, self.group, type(self).__name__, '') def _get_fontsize_defaults(self): - """ - Should returns default fontsize for the following keywords: + """Should returns default fontsize for the following keywords: * ticks * minor_ticks @@ -538,6 +539,7 @@ def _get_fontsize_defaults(self): However may also provide more specific defaults for specific axis label or ticks, e.g. clabel or xticks. 
+ """ return {} @@ -573,13 +575,13 @@ def _fontsize(self, key, label='fontsize', common=True): return {label: scale_fontsize(size, self.fontscale)} def compute_ranges(self, obj, key, ranges): - """ - Given an object, a specific key, and the normalization options, + """Given an object, a specific key, and the normalization options, this method will find the specified normalization options on the appropriate OptionTree, group the elements according to the selected normalization option (i.e. either per frame or over the whole animation) and finally compute the dimension ranges in each group. The new set of ranges is returned. + """ prev_frame = getattr(self, 'prev_frame', None) all_table = all(isinstance(el, Table) for el in obj.traverse(lambda x: x, [Element])) @@ -619,13 +621,13 @@ def compute_ranges(self, obj, key, ranges): return ranges def _get_norm_opts(self, obj): - """ - Gets the normalization options for a LabelledData object by + """Gets the normalization options for a LabelledData object by traversing the object to find elements and their ids. The id is then used to select the appropriate OptionsTree, accumulating the normalization options into a dictionary. Returns a dictionary of normalization options for each element in the tree. + """ norm_opts = {} @@ -884,17 +886,17 @@ def _compute_group_range(cls, group, elements, ranges, framewise, @classmethod def _traverse_options(cls, obj, opt_type, opts, specs=None, keyfn=None, defaults=True): - """ - Traverses the supplied object getting all options in opts for + """Traverses the supplied object getting all options in opts for the specified opt_type and specs. Also takes into account the plotting class defaults for plot options. If a keyfn is supplied the returned options will be grouped by the returned keys. + """ def lookup(x): - """ - Looks up options for object, including plot defaults. + """Looks up options for object, including plot defaults. keyfn determines returned key otherwise None key is used. + """ options = cls.lookup_options(x, opt_type) selected = {o: options.options[o] @@ -929,13 +931,13 @@ def lookup(x): return options if keyfn else options.get(None, {}) def _get_projection(cls, obj): - """ - Uses traversal to find the appropriate projection + """Uses traversal to find the appropriate projection for a nested object. Respects projections set on Overlays before considering Element based settings, before finally looking up the default projection on the plot type. If more than one non-None projection type is found an exception is raised. + """ isoverlay = lambda x: isinstance(x, CompositeOverlay) element3d = obj.traverse(lambda x: x, [Element3D]) @@ -959,8 +961,8 @@ def update(self, key): return item def __len__(self): - """ - Returns the total number of available frames. + """Returns the total number of available frames. + """ return len(self.keys) @@ -974,8 +976,8 @@ def _sources_match(src1, src2): return src1 is src2 or (src1._plot_id is not None and src1._plot_id == src2._plot_id) def _matching_plot_type(self, element): - """ - Checks if the plot type matches the element type. + """Checks if the plot type matches the element type. + """ return ( (not isinstance(element, CompositeOverlay) or isinstance(self, GenericOverlayPlot) or self.batched) and @@ -983,9 +985,9 @@ def _matching_plot_type(self, element): ) def _construct_callbacks(self): - """ - Initializes any callbacks for streams which have defined + """Initializes any callbacks for streams which have defined the plotted object as a source. 
+ """ source_streams = [] cb_classes = set() @@ -1019,7 +1021,9 @@ def _construct_callbacks(self): @property def link_sources(self): - "Returns potential Link or Stream sources." + """Returns potential Link or Stream sources. + + """ if isinstance(self, GenericOverlayPlot): zorders = [] elif self.batched: @@ -1045,10 +1049,10 @@ def link_sources(self): class GenericElementPlot(DimensionedPlot): - """ - Plotting baseclass to render contents of an Element. Implements + """Plotting baseclass to render contents of an Element. Implements methods to get the correct frame given a HoloMap, axis labels and extents and titles. + """ apply_ranges = param.Boolean(default=True, doc=""" @@ -1278,9 +1282,9 @@ def __init__(self, element, keys=None, ranges=None, dimensions=None, def get_zorder(self, overlay, key, el): - """ - Computes the z-order of element in the NdOverlay + """Computes the z-order of element in the NdOverlay taking into account possible batching of elements. + """ spec = util.get_overlay_spec(overlay, key, el) return self.ordering.index(spec) @@ -1292,12 +1296,12 @@ def _updated_zorders(self, overlay): return [self.ordering.index(spec) for spec in specs] def _get_axis_dims(self, element): - """ - Returns the dimensions corresponding to each axis. + """Returns the dimensions corresponding to each axis. Should return a list of dimensions or list of lists of dimensions, which will be formatted to label the axis and to link axes. + """ dims = element.dimensions()[:2] if len(dims) == 1: @@ -1331,8 +1335,8 @@ def _get_frame(self, key): return frame def _execute_hooks(self, element): - """ - Executes finalize hooks + """Executes finalize hooks + """ for hook in self.hooks: try: @@ -1342,13 +1346,13 @@ def _execute_hooks(self, element): f"applied:\n\n {e}") def get_aspect(self, xspan, yspan): - """ - Should define the aspect ratio of the plot. + """Should define the aspect ratio of the plot. + """ def get_padding(self, obj, extents): - """ - Computes padding along the axes taking into account the plot aspect. + """Computes padding along the axes taking into account the plot aspect. + """ (x0, y0, z0, x1, y1, z1) = extents padding_opt = self.lookup_options(obj, 'plot').kwargs.get('padding') @@ -1450,8 +1454,7 @@ def _get_range_extents(self, element, ranges, range_type, xdim, ydim, zdim): return (x0, y0, x1, y1) def get_extents(self, element, ranges, range_type='combined', dimension=None, xdim=None, ydim=None, zdim=None, lims_as_soft_ranges=False, **kwargs): - """ - Gets the extents for the axes from the current Element. The globally + """Gets the extents for the axes from the current Element. The globally computed ranges can optionally override the extents. The extents are computed by combining the data ranges, extents @@ -1476,6 +1479,7 @@ def get_extents(self, element, ranges, range_type='combined', dimension=None, xd This is used e.g. when apply_hard_bounds is True and xlim/ylim is set, in which case we limit the initial viewable range to xlim/ylim, but allow navigation up to the abs max between the data range and xlim/ylim. 
+ """ num = 6 if (isinstance(self.projection, str) and self.projection == '3d') else 4 if self.apply_extents and range_type in ('combined', 'extents'): @@ -1584,9 +1588,9 @@ def _format_title_components(self, key, dimensions=True, separator='\n'): return (label, group, type_name, dim_title) def _parse_backend_opt(self, opt, plot, model_accessor_aliases): - """ - Parses a custom option of the form 'model.accessor.option' + """Parses a custom option of the form 'model.accessor.option' and returns the corresponding model and accessor. + """ accessors = opt.split('.') if len(accessors) < 2: @@ -1706,22 +1710,22 @@ def _parse_backend_opt(self, opt, plot, model_accessor_aliases): return model, attr_accessor def update_frame(self, key, ranges=None): - """ - Set the plot(s) to the given frame number. Operates by + """Set the plot(s) to the given frame number. Operates by manipulating the matplotlib objects held in the self._handles dictionary. If n is greater than the number of available frames, update using the last available frame. + """ class GenericOverlayPlot(GenericElementPlot): - """ - Plotting baseclass to render (Nd)Overlay objects. It implements + """Plotting baseclass to render (Nd)Overlay objects. It implements methods to handle the creation of ElementPlots, coordinating style groupings and zorder for all layers across a HoloMap. It also allows collapsing of layers via the Compositor. + """ batched = param.Boolean(default=True, doc=""" @@ -1774,10 +1778,10 @@ def __init__(self, overlay, ranges=None, batched=True, keys=None, group_counter= [GenericElementPlot]) def _apply_compositor(self, holomap, ranges=None, keys=None, dimensions=None): - """ - Given a HoloMap compute the appropriate (mapwise or framewise) + """Given a HoloMap compute the appropriate (mapwise or framewise) ranges in order to apply the Compositor collapse operations in display mode (data collapse should already have happened). + """ # Compute framewise normalization defaultdim = holomap.ndims == 1 and holomap.kdims[0].name != 'Frame' @@ -1942,9 +1946,9 @@ def _match_subplot(self, key, subplot, items, element): return idx, spec, exact def _create_dynamic_subplots(self, key, items, ranges, **init_kwargs): - """ - Handles the creation of new subplots when a DynamicMap returns + """Handles the creation of new subplots when a DynamicMap returns a changing set of elements in an Overlay. + """ length = self.style_grouping group_fn = lambda x: (x.type.__name__, x.last.group, x.last.label) @@ -1963,12 +1967,11 @@ def _create_dynamic_subplots(self, key, items, ranges, **init_kwargs): self.dynamic_subplots.append(subplot) def _update_subplot(self, subplot, spec): - """ - Updates existing subplots when the subplot has been assigned + """Updates existing subplots when the subplot has been assigned to plot an element that is not an exact match to the object it was initially assigned. - """ + """ # See if the precise spec has already been assigned a cyclic # index otherwise generate a new one if spec in self.cyclic_index_lookup: @@ -1986,8 +1989,8 @@ def _update_subplot(self, subplot, spec): subplot.overlay_dims = dict(new_dims) def _get_subplot_extents(self, overlay, ranges, range_type, dimension=None): - """ - Iterates over all subplots and collects the extents of each. + """Iterates over all subplots and collects the extents of each. 
+ """ if range_type == 'combined': extents = {'extents': [], 'soft': [], 'hard': [], 'data': []} @@ -2102,15 +2105,15 @@ def __init__(self, layout, keys=None, dimensions=None, **params): self._link_dimensioned_streams() def _link_dimensioned_streams(self): - """ - Should perform any linking required to update titles when dimensioned + """Should perform any linking required to update titles when dimensioned streams change. + """ def _get_frame(self, key): - """ - Creates a clone of the Layout with the nth-frame for each + """Creates a clone of the Layout with the nth-frame for each Element. + """ cached = self.current_key is None layout_frame = self.layout.clone(shared_data=False) @@ -2140,9 +2143,9 @@ def _format_title_components(self, key, dimensions=True, separator='\n'): class GenericLayoutPlot(GenericCompositePlot): - """ - A GenericLayoutPlot accepts either a Layout or a NdLayout and + """A GenericLayoutPlot accepts either a Layout or a NdLayout and displays the elements in a cartesian grid in scanline order. + """ transpose = param.Boolean(default=False, doc=""" @@ -2164,11 +2167,11 @@ def __init__(self, layout, **params): class GenericAdjointLayoutPlot(Plot): - """ - AdjointLayoutPlot allows placing up to three Views in a number of + """AdjointLayoutPlot allows placing up to three Views in a number of predefined and fixed layouts, which are defined by the layout_dict class attribute. This allows placing subviews next to a main plot in either a 'top' or 'right' position. + """ layout_dict = {'Single': {'positions': ['main']}, diff --git a/holoviews/plotting/plotly/callbacks.py b/holoviews/plotting/plotly/callbacks.py index 5f38f24c42..35c86d74e8 100644 --- a/holoviews/plotting/plotly/callbacks.py +++ b/holoviews/plotting/plotly/callbacks.py @@ -16,11 +16,11 @@ class PlotlyCallbackMetaClass(type): - """ - Metaclass for PlotlyCallback classes. + """Metaclass for PlotlyCallback classes. We want each callback class to keep track of all of the instances of the class. Using a meta class here lets us keep the logic for instance tracking in one place. + """ def __init__(cls, name, bases, attrs): diff --git a/holoviews/plotting/plotly/dash.py b/holoviews/plotting/plotly/dash.py index 74a3c1b3f7..6656e7cc04 100644 --- a/holoviews/plotting/plotly/dash.py +++ b/holoviews/plotting/plotly/dash.py @@ -73,16 +73,18 @@ def get_layout_ranges(plot): def plot_to_figure( plot, reset_nclicks=0, layout_ranges=None, responsive=True, use_ranges=True ): - """ - Convert a HoloViews plotly plot to a plotly.py Figure. - - Args: - plot: A HoloViews plotly plot object - reset_nclicks: Number of times a reset button associated with the plot has been - clicked - - Returns: - A plotly.py Figure + """Convert a HoloViews plotly plot to a plotly.py Figure. + + Parameters + ---------- + plot : A HoloViews plotly plot object + reset_nclicks : int + Number of times a reset button associated with the plot has been + clicked + + Returns + ------- + A plotly.py Figure """ fig_dict = plot.state clean_internal_figure_properties(fig_dict) @@ -120,18 +122,19 @@ def plot_to_figure( def to_function_spec(hvobj): - """ - Convert Dynamic HoloViews object into a pure function that accepts kdim values + """Convert Dynamic HoloViews object into a pure function that accepts kdim values and stream contents as positional arguments. This borrows the low-level holoviews decollate logic, but instead of returning DynamicMap with cloned streams, returns a HoloViewsFunctionSpec. 
- Args: - hvobj: A potentially dynamic Holoviews object + Parameters + ---------- + hvobj : A potentially dynamic Holoviews object - Returns: - HoloViewsFunctionSpec + Returns + ------- + HoloViewsFunctionSpec """ kdims_list = [] original_streams = [] @@ -166,15 +169,18 @@ def to_function_spec(hvobj): def populate_store_with_stream_contents( store_data, streams ): - """ - Add contents of streams to the store dictionary - - Args: - store_data: The store dictionary - streams: List of streams whose contents should be added to the store - - Returns: - None + """Add contents of streams to the store dictionary + + Parameters + ---------- + store_data + The store dictionary + streams + List of streams whose contents should be added to the store + + Returns + ------- + None """ for stream in streams: # Add stream @@ -186,14 +192,16 @@ def populate_store_with_stream_contents( def build_derived_callback(derived_stream): - """ - Build StreamCallback for Derived stream + """Build StreamCallback for Derived stream - Args: - derived_stream: A Derived stream + Parameters + ---------- + derived_stream + A Derived stream - Returns: - StreamCallback + Returns + ------- + StreamCallback """ input_ids = [id(stream) for stream in derived_stream.input_streams] constants = copy.copy(derived_stream.constants) @@ -208,14 +216,16 @@ def derived_callback(*stream_values): def build_history_callback(history_stream): - """ - Build StreamCallback for History stream + """Build StreamCallback for History stream - Args: - history_stream: A History stream + Parameters + ---------- + history_stream + A History stream - Returns: - StreamCallback + Returns + ------- + StreamCallback """ history_id = id(history_stream) input_stream_id = id(history_stream.input_stream) @@ -233,21 +243,25 @@ def history_callback(prior_value, input_value): def populate_stream_callback_graph(stream_callbacks, streams): - """ - Populate the stream_callbacks dict with StreamCallback instances + """Populate the stream_callbacks dict with StreamCallback instances associated with all of the History and Derived streams in input stream list. Input streams to any History or Derived streams are processed recursively - Args: - stream_callbacks: dict from id(stream) to StreamCallbacks the should - be populated. Order will be a breadth-first traversal of the provided - streams list, and any input streams that these depend on. + Parameters + ---------- + stream_callbacks + dict from id(stream) to StreamCallbacks that should + be populated. + Order will be a breadth-first traversal of the provided + streams list, and any input streams that these depend on. - streams: List of streams to build StreamCallbacks from + streams + List of streams to build StreamCallbacks from - Returns: - None + Returns + ------- + None """ for stream in streams: if isinstance(stream, Derived): @@ -263,31 +277,33 @@ def populate_stream_callback_graph(stream_callbacks, streams): def encode_store_data(store_data): - """ - Encode store_data dict into a JSON serializable dict + """Encode store_data dict into a JSON serializable dict This is currently done by pickling store_data and converting to a base64 encoded string. 
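A round-trip sketch for the store (de)serialization helpers documented above (not part of this patch; importing the dash module requires the optional dash dependency):

    from holoviews.plotting.plotly.dash import encode_store_data, decode_store_data

    store = encode_store_data({'kdims': {}, 'streams': {}})     # any picklable dict
    assert 'pickled' in store                                   # base64-encoded pickle payload
    assert decode_store_data(store) == {'kdims': {}, 'streams': {}}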
If HoloViews supports JSON serialization in the future, this method could be updated to use this approach instead - Args: - store_data: dict potentially containing HoloViews objects + Parameters + ---------- + store_data : dict potentially containing HoloViews objects - Returns: - dict that can be JSON serialized + Returns + ------- + dict that can be JSON serialized """ return {"pickled": base64.b64encode(pickle.dumps(store_data)).decode("utf-8")} def decode_store_data(store_data): - """ - Decode a dict that was encoded by the encode_store_data function. + """Decode a dict that was encoded by the encode_store_data function. - Args: - store_data: dict that was encoded by encode_store_data + Parameters + ---------- + store_data : dict that was encoded by encode_store_data - Returns: - decoded dict + Returns + ------- + decoded dict """ return pickle.loads(base64.b64decode(store_data["pickled"])) @@ -296,40 +312,48 @@ def to_dash( app, hvobjs, reset_button=False, graph_class=dcc.Graph, button_class=html.Button, responsive="width", use_ranges=True, ): - """ - Build Dash components and callbacks from a collection of HoloViews objects - - Args: - app: dash.Dash application instance - hvobjs: List of HoloViews objects to build Dash components from - reset_button: If True, construct a Button component that, which clicked, will - reset the interactive stream values associated with the provided HoloViews - objects to their initial values. Defaults to False. - graph_class: Class to use when creating Graph components, one of dcc.Graph - (default) or ddk.Graph. - button_class: Class to use when creating reset button component. - E.g. html.Button (default) or dbc.Button - responsive: If True graphs will fill their containers width and height - responsively. If False, graphs will have a fixed size matching their - HoloViews size. If "width" (default), the width is responsive but - height matches the HoloViews size. If "height", the height is responsive - but the width matches the HoloViews size. - use_ranges: If True, initialize graphs with the dimension ranges specified - in the HoloViews objects. If False, allow Dash to perform its own - auto-range calculations. - Returns: - DashComponents named tuple with properties: - - graphs: List of graph components (with type matching the input - graph_class argument) with order corresponding to the order - of the input hvobjs list. - - resets: List of reset buttons that can be used to reset figure state. - List has length 1 if reset_button=True and is empty if - reset_button=False. - - kdims: Dict from kdim names to Dash Components that can be used to - set the corresponding kdim value. - - store: dcc.Store the must be included in the app layout - - children: Single list of all components above. The order is graphs, - kdims, resets, and then the store. + """Build Dash components and callbacks from a collection of HoloViews objects + + Parameters + ---------- + app : dash.Dash application instance + hvobjs + List of HoloViews objects to build Dash components from + reset_button : bool + If True, construct a Button component that, when clicked, will + reset the interactive stream values associated with the provided HoloViews + objects to their initial values. Defaults to False. + graph_class + Class to use when creating Graph components, one of dcc.Graph + (default) or ddk.Graph. + button_class + Class to use when creating reset button component. + E.g. 
html.Button (default) or dbc.Button + responsive : bool, str + If True graphs will fill their containers width and height + responsively. If False, graphs will have a fixed size matching their + HoloViews size. If "width" (default), the width is responsive but + height matches the HoloViews size. If "height", the height is responsive + but the width matches the HoloViews size. + use_ranges : bool + If True, initialize graphs with the dimension ranges specified + in the HoloViews objects. If False, allow Dash to perform its own + auto-range calculations. + + Returns + ------- + DashComponents named tuple with properties: + - graphs: List of graph components (with type matching the input + graph_class argument) with order corresponding to the order + of the input hvobjs list. + - resets: List of reset buttons that can be used to reset figure state. + List has length 1 if reset_button=True and is empty if + reset_button=False. + - kdims: Dict from kdim names to Dash Components that can be used to + set the corresponding kdim value. + - store: dcc.Store the must be included in the app layout + - children: Single list of all components above. The order is graphs, + kdims, resets, and then the store. """ # Number of figures num_figs = len(hvobjs) @@ -644,17 +668,23 @@ def update_kdim_label(value, kdim_label=kdim_label): def update_stream_values_for_type(store_data, stream_event_data, uid_to_streams_for_type): - """ - Update the store with values of streams for a single type - - Args: - store_data: Current store dictionary - stream_event_data: Potential stream data for current plotly event and - traces in figures - uid_to_streams_for_type: Mapping from trace UIDs to HoloViews streams of - a particular type - Returns: - any_change: Whether any stream value has been updated + """Update the store with values of streams for a single type + + Parameters + ---------- + store_data + Current store dictionary + stream_event_data + Potential stream data for current plotly event and + traces in figures + uid_to_streams_for_type + Mapping from trace UIDs to HoloViews streams of + a particular type + + Returns + ------- + any_change + Whether any stream value has been updated """ any_change = False for uid, event_data in stream_event_data.items(): diff --git a/holoviews/plotting/plotly/element.py b/holoviews/plotting/plotly/element.py index d70191f367..97f6e7210d 100644 --- a/holoviews/plotting/plotly/element.py +++ b/holoviews/plotting/plotly/element.py @@ -124,8 +124,8 @@ def trace_kwargs(cls, **kwargs): return {} def initialize_plot(self, ranges=None, is_geo=False): - """ - Initializes a new plot object with the last available frame. + """Initializes a new plot object with the last available frame. 
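A minimal Dash wiring sketch for the to_dash entry point documented above (illustrative only, not part of this patch; assumes a recent dash release is installed):

    import dash
    from dash import html
    import holoviews as hv
    from holoviews.plotting.plotly.dash import to_dash
    hv.extension('plotly')

    points = hv.Points([(0, 0), (1, 1), (2, 4)])
    app = dash.Dash(__name__)
    components = to_dash(app, [points], reset_button=True)
    app.layout = html.Div(components.children)
    # app.run(debug=True)  # uncomment to serve locally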
+ """ # Get element key and ranges for frame fig = self.generate_plot(self.keys[-1], ranges, is_geo=is_geo) @@ -273,16 +273,15 @@ def graph_options(self, element, ranges, style, is_geo=False, **kwargs): return opts def init_graph(self, datum, options, index=0, **kwargs): - """ - Initialize the plotly components that will represent the element + """Initialize the plotly components that will represent the element Parameters ---------- - datum: dict + datum : dict An element of the data list returned by the get_data method - options: dict + options : dict Graph options that were returned by the graph_options method - index: int + index : int Index of datum in the original list returned by the get_data method Returns @@ -316,8 +315,8 @@ def get_data(self, element, ranges, style, is_geo=False): def get_aspect(self, xspan, yspan): - """ - Computes the aspect ratio of the plot + """Computes the aspect ratio of the plot + """ return self.width/self.height @@ -328,6 +327,7 @@ def _get_axis_dims(self, element): Should return a list of dimensions or list of lists of dimensions, which will be formatted to label the axis and to link axes. + """ dims = element.dimensions()[:3] pad = [None]*max(3-len(dims), 0) @@ -382,8 +382,8 @@ def _apply_transforms(self, element, ranges, style): return new_style def _format_title(self, key, separator=' '): - """ - Formats the title of the plot. + """Formats the title of the plot. + """ title = super()._format_title(key, separator) @@ -575,9 +575,9 @@ def _get_ticks(self, axis, ticker): axis.update(axis_props) def update_frame(self, key, ranges=None, element=None, is_geo=False): - """ - Updates an existing plot with data corresponding + """Updates an existing plot with data corresponding to the key. + """ self.generate_plot(key, ranges, element, is_geo=is_geo) @@ -687,8 +687,8 @@ class OverlayPlot(GenericOverlayPlot, ElementPlot): 'padding', 'xlabel', 'ylabel', 'zlabel', 'xlim', 'ylim', 'zlim'] def initialize_plot(self, ranges=None, is_geo=False): - """ - Initializes a new plot object with the last available frame. + """Initializes a new plot object with the last available frame. + """ # Get element key and ranges for frame return self.generate_plot(next(iter(self.hmap.data.keys())), ranges, is_geo=is_geo) diff --git a/holoviews/plotting/plotly/plot.py b/holoviews/plotting/plotly/plot.py index c46d659700..8b62c8e0e7 100644 --- a/holoviews/plotting/plotly/plot.py +++ b/holoviews/plotting/plotly/plot.py @@ -29,15 +29,17 @@ class PlotlyPlot(DimensionedPlot, CallbackPlot): @property def state(self): - """ - The plotting state that gets updated via the update method and + """The plotting state that gets updated via the update method and used by the renderer to generate output. + """ return self.handles['fig'] def _trigger_refresh(self, key): - "Triggers update to a plot on a refresh event" + """Triggers update to a plot on a refresh event + + """ if self.top_level: self.update(key) else: @@ -133,12 +135,12 @@ def _init_layout(self, layout): def _create_subplots(self, layout, positions, layout_dimensions, ranges, num=0): - """ - Plot all the views contained in the AdjointLayout Object using axes + """Plot all the views contained in the AdjointLayout Object using axes appropriate to the layout configuration. All the axes are supplied by LayoutPlot - the purpose of the call is to invoke subplots with correct options and styles and hide any empty axes as necessary. 
+ """ subplots = {} adjoint_clone = layout.clone(shared_data=False, id=layout.id) @@ -254,12 +256,12 @@ def __init__(self, layout, layout_type, subplots, **params): super().__init__(subplots=subplots, **params) def initialize_plot(self, ranges=None, is_geo=False): - """ - Plot all the views contained in the AdjointLayout Object using axes + """Plot all the views contained in the AdjointLayout Object using axes appropriate to the layout configuration. All the axes are supplied by LayoutPlot - the purpose of the call is to invoke subplots with correct options and styles and hide any empty axes as necessary. + """ return self.generate_plot(self.keys[-1], ranges, is_geo=is_geo) @@ -279,9 +281,9 @@ def generate_plot(self, key, ranges=None, is_geo=False): class GridPlot(PlotlyPlot, GenericCompositePlot): - """ - Plot a group of elements in a grid layout based on a GridSpace element + """Plot a group of elements in a grid layout based on a GridSpace element object. + """ hspacing = param.Number(default=15, bounds=(0, None)) diff --git a/holoviews/plotting/plotly/renderer.py b/holoviews/plotting/plotly/renderer.py index 3f4630041b..0e8afd8159 100644 --- a/holoviews/plotting/plotly/renderer.py +++ b/holoviews/plotting/plotly/renderer.py @@ -20,10 +20,9 @@ def _PlotlyHoloviewsPane(fig_dict, **kwargs): - """ - Custom Plotly pane constructor for use by the HoloViews Pane. - """ + """Custom Plotly pane constructor for use by the HoloViews Pane. + """ # Remove internal HoloViews properties clean_internal_figure_properties(fig_dict) @@ -72,9 +71,9 @@ class PlotlyRenderer(Renderer): @bothmethod def get_plot_state(self_or_cls, obj, doc=None, renderer=None, numpy_convert=False, **kwargs): - """ - Given a HoloViews Viewable return a corresponding figure dictionary. + """Given a HoloViews Viewable return a corresponding figure dictionary. Allows cleaning the dictionary of any internal properties that were added + """ fig_dict = super().get_plot_state(obj, renderer, **kwargs) config = fig_dict.get('config', {}) @@ -159,8 +158,8 @@ def plot_options(cls, obj, percent_size): @classmethod def load_nb(cls, inline=True): - """ - Loads the plotly notebook resources. + """Loads the plotly notebook resources. 
+ """ import panel.models.plotly # noqa cls._loaded = True diff --git a/holoviews/plotting/plotly/selection.py b/holoviews/plotting/plotly/selection.py index 243cb0b30a..2fe383706c 100644 --- a/holoviews/plotting/plotly/selection.py +++ b/holoviews/plotting/plotly/selection.py @@ -4,8 +4,8 @@ class PlotlyOverlaySelectionDisplay(OverlaySelectionDisplay): - """ - Overlay selection display subclass for use with plotly backend + """Overlay selection display subclass for use with plotly backend + """ def _build_element_layer(self, element, layer_color, layer_alpha, **opts): diff --git a/holoviews/plotting/plotly/tiles.py b/holoviews/plotting/plotly/tiles.py index b5abefd76a..ca185d487b 100644 --- a/holoviews/plotting/plotly/tiles.py +++ b/holoviews/plotting/plotly/tiles.py @@ -88,7 +88,7 @@ def init_graph(self, datum, options, index=0, **kwargs): return {'traces': [datum], PLOTLY_MAP: options} def generate_plot(self, key, ranges, element=None, is_geo=False): - """ - Override to force is_geo to True + """Override to force is_geo to True + """ return super().generate_plot(key, ranges, element, is_geo=True) diff --git a/holoviews/plotting/plotly/util.py b/holoviews/plotting/plotly/util.py index 20538647c0..a6889a41e6 100644 --- a/holoviews/plotting/plotly/util.py +++ b/holoviews/plotting/plotly/util.py @@ -129,20 +129,21 @@ def _get_subplot_number(subplot_val): - """ - Extract the subplot number from a subplot value string. + """Extract the subplot number from a subplot value string. 'x3' -> 3 'polar2' -> 2 'scene' -> 1 'y' -> 1 - Note: the absence of a subplot number (e.g. 'y') is treated by plotly as + Note + ---- + The absence of a subplot number (e.g. 'y') is treated by plotly as a subplot number of 1 Parameters ---------- - subplot_val: str + subplot_val : str Subplot string value (e.g. 'scene4') Returns @@ -158,8 +159,7 @@ def _get_subplot_number(subplot_val): def _get_subplot_val_prefix(subplot_type): - """ - Get the subplot value prefix for a subplot type. For most subplot types + """Get the subplot value prefix for a subplot type. For most subplot types this is equal to the subplot type string itself. For example, a `scatter3d.scene` value of `scene2` is used to associate the scatter3d trace with the `layout.scene2` subplot. @@ -170,7 +170,7 @@ def _get_subplot_val_prefix(subplot_type): Parameters ---------- - subplot_type: str + subplot_type : str Subplot string value (e.g. 'scene4') Returns @@ -187,8 +187,7 @@ def _get_subplot_val_prefix(subplot_type): def _get_subplot_prop_name(subplot_type): - """ - Get the name of the trace property used to associate a trace with a + """Get the name of the trace property used to associate a trace with a particular subplot type. For most subplot types this is equal to the subplot type string. For example, the `scatter3d.scene` property is used to associate a `scatter3d` trace with a particular `scene` subplot. @@ -200,7 +199,7 @@ def _get_subplot_prop_name(subplot_type): Parameters ---------- - subplot_type: str + subplot_type : str Subplot string value (e.g. 'scene4') Returns @@ -215,8 +214,7 @@ def _get_subplot_prop_name(subplot_type): def _normalize_subplot_ids(fig): - """ - Make sure a layout subplot property is initialized for every subplot that + """Make sure a layout subplot property is initialized for every subplot that is referenced by a trace in the figure. 
For example, if a figure contains a `scatterpolar` trace with the `subplot` @@ -224,14 +222,15 @@ def _normalize_subplot_ids(fig): has a `polar3` property, and will initialize it to an empty dict if it does not - Note: This function mutates the input figure dict + Note + ---- + This function mutates the input figure dict Parameters ---------- - fig: dict + fig : dict A plotly figure dict """ - layout = fig.setdefault('layout', {}) for trace in fig.get('data', None): trace_type = trace.get('type', 'scatter') @@ -255,13 +254,12 @@ def _normalize_subplot_ids(fig): def _get_max_subplot_ids(fig): - """ - Given an input figure, return a dict containing the max subplot number + """Given an input figure, return a dict containing the max subplot number for each subplot type in the figure Parameters ---------- - fig: dict + fig : dict A plotly figure dict Returns @@ -309,20 +307,21 @@ def _get_max_subplot_ids(fig): def _offset_subplot_ids(fig, offsets): - """ - Apply offsets to the subplot id numbers in a figure. + """Apply offsets to the subplot id numbers in a figure. - Note: This function mutates the input figure dict + Notes + ----- + 1. This function mutates the input figure dict - Note: This function assumes that the normalize_subplot_ids function has + 2. This function assumes that the normalize_subplot_ids function has already been run on the figure, so that all layout subplot properties in use are explicitly present in the figure's layout. Parameters ---------- - fig: dict + fig : dict A plotly figure dict - offsets: dict + offsets : dict A dict from subplot types to the offset to be applied for each subplot type. This dict matches the form of the dict returned by get_max_subplot_ids @@ -423,34 +422,35 @@ def _offset_subplot_ids(fig, offsets): def _scale_translate(fig, scale_x, scale_y, translate_x, translate_y): - """ - Scale a figure and translate it to sub-region of the original + """Scale a figure and translate it to sub-region of the original figure canvas. - Note: If the input figure has a title, this title is converted into an + Notes + ----- + 1. If the input figure has a title, this title is converted into an annotation and scaled along with the rest of the figure. - Note: This function mutates the input fig dict + 2. This function mutates the input fig dict - Note: This function assumes that the normalize_subplot_ids function has + 3. This function assumes that the normalize_subplot_ids function has already been run on the figure, so that all layout subplot properties in use are explicitly present in the figure's layout. Parameters ---------- - fig: dict + fig : dict A plotly figure dict - scale_x: float + scale_x : float Factor by which to scale the figure in the x-direction. This will typically be a value < 1. E.g. a value of 0.5 will cause the resulting figure to be half as wide as the original. - scale_y: float + scale_y : float Factor by which to scale the figure in the y-direction. This will typically be a value < 1 - translate_x: float + translate_x : float Factor by which to translate the scaled figure in the x-direction in normalized coordinates. - translate_y: float + translate_y : float Factor by which to translate the scaled figure in the x-direction in normalized coordinates. 
""" @@ -547,20 +547,20 @@ def perform_scale_translate(obj): def merge_figure(fig, subfig): - """ - Merge a sub-figure into a parent figure + """Merge a sub-figure into a parent figure - Note: This function mutates the input fig dict, but it does not mutate + Note + ---- + This function mutates the input fig dict, but it does not mutate the subfig dict Parameters ---------- - fig: dict + fig : dict The plotly figure dict into which the sub figure will be merged - subfig: dict + subfig : dict The plotly figure dict that will be copied and then merged into `fig` """ - # traces data = fig.setdefault('data', []) data.extend(copy.deepcopy(subfig.get('data', []))) @@ -571,17 +571,18 @@ def merge_figure(fig, subfig): def merge_layout(obj, subobj): - """ - Merge layout objects recursively + """Merge layout objects recursively - Note: This function mutates the input obj dict, but it does not mutate + Note + ---- + This function mutates the input obj dict, but it does not mutate the subobj dict Parameters ---------- - obj: dict + obj : dict dict into which the sub-figure dict will be merged - subobj: dict + subobj : dict dict that sill be copied and merged into `obj` """ for prop, val in subobj.items(): @@ -605,16 +606,15 @@ def merge_layout(obj, subobj): def _compute_subplot_domains(widths, spacing): - """ - Compute normalized domain tuples for a list of widths and a subplot + """Compute normalized domain tuples for a list of widths and a subplot spacing value Parameters ---------- - widths: list of float + widths : list of float List of the desired widths of each subplot. The length of this list is also the specification of the number of desired subplots - spacing: float + spacing : float Spacing between subplots in normalized coordinates Returns @@ -647,30 +647,29 @@ def figure_grid(figures_grid, width=None, height=None ): - """ - Construct a figure from a 2D grid of sub-figures + """Construct a figure from a 2D grid of sub-figures Parameters ---------- - figures_grid: list of list of (dict or None) + figures_grid : list of list of (dict or None) 2D list of plotly figure dicts that will be combined in a grid to produce the resulting figure. None values maybe used to leave empty grid cells - row_spacing: float (default 50) + row_spacing : float (default 50) Vertical spacing between rows in the grid in pixels - column_spacing: float (default 50) + column_spacing : float (default 50) Horizontal spacing between columns in the grid in pixels coordinates - share_xaxis: bool (default False) + share_xaxis : bool (default False) Share x-axis between sub-figures in the same column. Also link all x-axes in the figure. This will only work if each sub-figure has a single x-axis - share_yaxis: bool (default False) + share_yaxis : bool (default False) Share y-axis between sub-figures in the same row. Also link all y-axes in the figure. This will only work if each subfigure has a single y-axis - width: int (default None) + width : int (default None) Final figure width. If not specified, width is the sum of the max width of the figures in each column - height: int (default None) + height : int (default None) Final figure width. 
If not specified, height is the sum of the max height of the figures in each row @@ -809,14 +808,18 @@ def figure_grid(figures_grid, def get_colorscale(cmap, levels=None, cmin=None, cmax=None): """Converts a cmap spec to a plotly colorscale - Args: - cmap: A recognized colormap by name or list of colors - levels: A list or integer declaring the color-levels - cmin: The lower bound of the color range - cmax: The upper bound of the color range + Parameters + ---------- + cmap : A recognized colormap by name or list of colors + levels : A list or integer declaring the color-levels + cmin + The lower bound of the color range + cmax + The upper bound of the color range - Returns: - A valid plotly colorscale + Returns + ------- + A valid plotly colorscale """ ncolors = levels if isinstance(levels, int) else None if isinstance(levels, list): @@ -852,21 +855,21 @@ def get_colorscale(cmap, levels=None, cmin=None, cmax=None): def configure_matching_axes_from_dims(fig, matching_prop='_dim'): - """ - Configure matching axes for a figure + """Configure matching axes for a figure - Note: This function mutates the input figure + Note + ---- + This function mutates the input figure Parameters ---------- - fig: dict + fig : dict The figure dictionary to process. - matching_prop: str + matching_prop : str The name of the axis property that should be used to determine that two axes should be matched together. If the property is missing or None, axes will not be matched """ - # Build mapping from matching properties to (axis, ref) tuples axis_map = {} @@ -897,15 +900,16 @@ def configure_matching_axes_from_dims(fig, matching_prop='_dim'): def clean_internal_figure_properties(fig): - """ - Remove all HoloViews internal properties (those with leading underscores) from the + """Remove all HoloViews internal properties (those with leading underscores) from the input figure. - Note: This function mutates the input figure + Note + ---- + This function mutates the input figure Parameters ---------- - fig: dict + fig : dict The figure dictionary to process. """ fig_props = list(fig) diff --git a/holoviews/plotting/renderer.py b/holoviews/plotting/renderer.py index 3aa406669a..9cbdb42758 100644 --- a/holoviews/plotting/renderer.py +++ b/holoviews/plotting/renderer.py @@ -1,6 +1,6 @@ -""" -Public API for all plotting renderers supported by HoloViews, +"""Public API for all plotting renderers supported by HoloViews, regardless of plotting package or backend. + """ import base64 import os @@ -90,8 +90,7 @@ """ class Renderer(Exporter): - """ - The job of a Renderer is to turn the plotting state held within + """The job of a Renderer is to turn the plotting state held within Plot classes into concrete, visual output in the form of the PNG, SVG, MP4 or WebM formats (among others). Note that a Renderer is a type of Exporter and must therefore follow the Exporter interface. @@ -100,6 +99,7 @@ class Renderer(Exporter): appropriate Plot classes associated with that renderer in order to generate output. The process of 'drawing' is execute by the Plots and the Renderer turns the final plotting state into output. + """ center = param.Boolean(default=True, doc=""" @@ -201,8 +201,8 @@ def __call__(self, obj, fmt='auto', **kwargs): @bothmethod def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): - """ - Given a HoloViews Viewable return a corresponding plot instance. + """Given a HoloViews Viewable return a corresponding plot instance. 
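A minimal sketch of the get_plot workflow described above (element and backend are arbitrary choices):

import holoviews as hv

hv.extension('bokeh')

curve = hv.Curve([1, 2, 3])
plot = hv.renderer('bokeh').get_plot(curve)  # a Plot instance wrapping the backend models
fig = plot.state                             # the underlying bokeh model for this element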
+ """ if isinstance(obj, DynamicMap) and obj.unbounded: dims = ', '.join(f'{dim!r}' for dim in obj.unbounded) @@ -266,17 +266,17 @@ def get_plot(self_or_cls, obj, doc=None, renderer=None, comm=None, **kwargs): @bothmethod def get_plot_state(self_or_cls, obj, renderer=None, **kwargs): - """ - Given a HoloViews Viewable return a corresponding plot state. + """Given a HoloViews Viewable return a corresponding plot state. + """ if not isinstance(obj, Plot): obj = self_or_cls.get_plot(obj=obj, renderer=renderer, **kwargs) return obj.state def _validate(self, obj, fmt, **kwargs): - """ - Helper method to be used in the __call__ method to get a + """Helper method to be used in the __call__ method to get a suitable plot or widget object and the appropriate format. + """ if isinstance(obj, Viewable): return obj, 'html' @@ -314,8 +314,8 @@ def _validate(self, obj, fmt, **kwargs): return plot, fmt def _apply_post_render_hooks(self, data, obj, fmt): - """ - Apply the post-render hooks to the data. + """Apply the post-render hooks to the data. + """ hooks = self.post_render_hooks.get(fmt,[]) for hook in hooks: @@ -327,10 +327,10 @@ def _apply_post_render_hooks(self, data, obj, fmt): return data def html(self, obj, fmt=None, css=None, resources='CDN', **kwargs): - """ - Renders plot or data structure and wraps the output in HTML. + """Renders plot or data structure and wraps the output in HTML. The comm argument defines whether the HTML output includes code to initialize a Comm, if the plot supplies one. + """ plot, fmt = self._validate(obj, fmt) figdata, _ = self(plot, fmt, **kwargs) @@ -366,10 +366,10 @@ def html(self, obj, fmt=None, css=None, resources='CDN', **kwargs): return html def components(self, obj, fmt=None, comm=True, **kwargs): - """ - Returns data and metadata dictionaries containing HTML and JS + """Returns data and metadata dictionaries containing HTML and JS components to include render in app, notebook, or standalone document. + """ if isinstance(obj, Plot): plot = obj @@ -437,10 +437,10 @@ def _render_ipywidget(self, plot): return data, {} def static_html(self, obj, fmt=None, template=None): - """ - Generates a static HTML with the rendered object in the + """Generates a static HTML with the rendered object in the supplied format. Allows supplying a template formatting string with fields to interpolate 'js', 'css' and the main 'html'. + """ html_bytes = StringIO() self.save(obj, html_bytes, fmt) @@ -465,13 +465,13 @@ def get_widget(self_or_cls, plot, widget_type, **kwargs): @bothmethod def export_widgets(self_or_cls, obj, filename, fmt=None, template=None, json=False, json_path='', **kwargs): - """ - Render and export object as a widget to a static HTML + """Render and export object as a widget to a static HTML file. Allows supplying a custom template formatting string with fields to interpolate 'js', 'css' and the main 'html' containing the widget. Also provides options to export widget data to a json file in the supplied json_path (defaults to current path). + """ if fmt not in [*self_or_cls.widgets, "auto", None]: raise ValueError("Renderer.export_widget may only export " @@ -490,8 +490,7 @@ def _widget_kwargs(self_or_cls): @bothmethod def app(self_or_cls, plot, show=False, new_window=False, websocket_origin=None, port=0): - """ - Creates a bokeh app from a HoloViews object or plot. By + """Creates a bokeh app from a HoloViews object or plot. 
By default simply attaches the plot to bokeh's curdoc and returns the Document, if show option is supplied creates an Application instance and displays it either in a browser @@ -501,6 +500,7 @@ def app(self_or_cls, plot, show=False, new_window=False, websocket_origin=None, websocket origin is required when launching from an existing tornado server (such as the notebook) and it is not on the default port ('localhost:8888'). + """ if isinstance(plot, HoloViewsPane): pane = plot @@ -515,10 +515,10 @@ def app(self_or_cls, plot, show=False, new_window=False, websocket_origin=None, @bothmethod def server_doc(self_or_cls, obj, doc=None): - """ - Get a bokeh Document with the plot attached. May supply + """Get a bokeh Document with the plot attached. May supply an existing doc, otherwise bokeh.io.curdoc() is used to attach the plot to the global document instance. + """ if not isinstance(obj, HoloViewsPane): obj = HoloViewsPane(obj, renderer=self_or_cls, backend=self_or_cls.backend, @@ -527,9 +527,9 @@ def server_doc(self_or_cls, obj, doc=None): @classmethod def plotting_class(cls, obj): - """ - Given an object or Element class, return the suitable plotting + """Given an object or Element class, return the suitable plotting class needed to render it with the current renderer. + """ if isinstance(obj, AdjointLayout) or obj is AdjointLayout: obj = Layout @@ -547,22 +547,22 @@ class needed to render it with the current renderer. @classmethod def plot_options(cls, obj, percent_size): - """ - Given an object and a percentage size (as supplied by the + """Given an object and a percentage size (as supplied by the %output magic) return all the appropriate plot options that would be used to instantiate a plot class for that element. Default plot sizes at the plotting class level should be taken into account. + """ raise NotImplementedError @bothmethod def save(self_or_cls, obj, basename, fmt='auto', key=None, info=None, options=None, resources='inline', title=None, **kwargs): - """ - Save a HoloViews object to file, either using an explicitly + """Save a HoloViews object to file, either using an explicitly supplied format or to the appropriate default. + """ if info is None: info = {} @@ -613,41 +613,43 @@ def save(self_or_cls, obj, basename, fmt='auto', key=None, info=None, @bothmethod def _save_prefix(self_or_cls, ext): - "Hook to prefix content for instance JS when saving HTML" + """Hook to prefix content for instance JS when saving HTML + + """ return @bothmethod def get_size(self_or_cls, plot): - """ - Return the display size associated with a plot before + """Return the display size associated with a plot before rendering to any particular format. Used to generate appropriate HTML display. Returns a tuple of (width, height) in pixels. + """ raise NotImplementedError @classmethod @contextmanager def state(cls): - """ - Context manager to handle global state for a backend, + """Context manager to handle global state for a backend, allowing Plot classes to temporarily override that state. + """ yield @classmethod def validate(cls, options): - """ - Validate an options dictionary for the renderer. + """Validate an options dictionary for the renderer. 
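A save call matching the signature above might look like this (filename and format are illustrative):

import holoviews as hv

hv.extension('bokeh')

curve = hv.Curve([1, 2, 3])

# fmt='auto' falls back to the renderer default; here HTML output is requested explicitly
hv.renderer('bokeh').save(curve, 'curve_plot', fmt='html')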
+ """ return options @classmethod def load_nb(cls, inline=False, reloading=False, enable_mathjax=False): - """ - Loads any resources required for display of plots + """Loads any resources required for display of plots in the Jupyter notebook + """ if PANEL_VERSION >= (1, 0, 2): load_notebook(inline, reloading=reloading, enable_mathjax=enable_mathjax) @@ -670,8 +672,8 @@ def load_nb(cls, inline=False, reloading=False, enable_mathjax=False): @classmethod def _delete_plot(cls, plot_id): - """ - Deletes registered plots and calls Plot.cleanup + """Deletes registered plots and calls Plot.cleanup + """ plot = cls._plots.get(plot_id) if plot is None: diff --git a/holoviews/plotting/util.py b/holoviews/plotting/util.py index c514475bc8..66b6f35397 100644 --- a/holoviews/plotting/util.py +++ b/holoviews/plotting/util.py @@ -40,9 +40,9 @@ def displayable(obj): - """ - Predicate that returns whether the object is displayable or not + """Predicate that returns whether the object is displayable or not (i.e. whether the object obeys the nesting hierarchy) + """ if isinstance(obj, Overlay) and any(isinstance(o, (HoloMap, GridSpace, AdjointLayout)) for o in obj): @@ -111,16 +111,16 @@ def collate(obj): def isoverlay_fn(obj): - """ - Determines whether object is a DynamicMap returning (Nd)Overlay types. + """Determines whether object is a DynamicMap returning (Nd)Overlay types. + """ return isinstance(obj, CompositeOverlay) or (isinstance(obj, DynamicMap) and (isinstance(obj.last, CompositeOverlay))) def overlay_depth(obj): - """ - Computes the depth of a DynamicMap overlay if it can be determined + """Computes the depth of a DynamicMap overlay if it can be determined otherwise return None. + """ if isinstance(obj, DynamicMap): if isinstance(obj.last, CompositeOverlay): @@ -133,8 +133,7 @@ def overlay_depth(obj): def compute_overlayable_zorders(obj, path=None): - """ - Traverses an overlayable composite container to determine which + """Traverses an overlayable composite container to determine which objects are associated with specific (Nd)Overlay layers by z-order, making sure to take DynamicMap Callables into account. Returns a mapping between the zorders of each layer and a @@ -142,6 +141,7 @@ def compute_overlayable_zorders(obj, path=None): Used to determine which overlaid subplots should be linked with Stream callbacks. + """ if path is None: path = [] @@ -211,9 +211,9 @@ def compute_overlayable_zorders(obj, path=None): def is_dynamic_overlay(dmap): - """ - Traverses a DynamicMap graph and determines if any components + """Traverses a DynamicMap graph and determines if any components were overlaid dynamically (i.e. by * on a DynamicMap). + """ if not isinstance(dmap, DynamicMap): return False @@ -224,14 +224,14 @@ def is_dynamic_overlay(dmap): def split_dmap_overlay(obj, depth=0): - """ - Splits a DynamicMap into the original component layers it was + """Splits a DynamicMap into the original component layers it was constructed from by traversing the graph to search for dynamically overlaid components (i.e. constructed by using * on a DynamicMap). Useful for assigning subplots of an OverlayPlot the streams that are responsible for driving their updates. Allows the OverlayPlot to determine if a stream update should redraw a particular subplot. 
+ """ layers, streams = [], [] if isinstance(obj, DynamicMap): @@ -265,8 +265,8 @@ def split_dmap_overlay(obj, depth=0): def initialize_dynamic(obj): - """ - Initializes all DynamicMap objects contained by the object + """Initializes all DynamicMap objects contained by the object + """ dmaps = obj.traverse(lambda x: x, specs=[DynamicMap]) for dmap in dmaps: @@ -280,13 +280,18 @@ def initialize_dynamic(obj): def get_plot_frame(map_obj, key_map, cached=False): """Returns the current frame in a mapping given a key mapping. - Args: - obj: Nested Dimensioned object - key_map: Dictionary mapping between dimensions and key value - cached: Whether to allow looking up key in cache + Parameters + ---------- + obj + Nested Dimensioned object + key_map + Dictionary mapping between dimensions and key value + cached + Whether to allow looking up key in cache - Returns: - The item in the mapping corresponding to the supplied key. + Returns + ------- + The item in the mapping corresponding to the supplied key. """ if (map_obj.kdims and len(map_obj.kdims) == 1 and map_obj.kdims[0] == 'Frame' and not isinstance(map_obj, DynamicMap)): @@ -313,13 +318,18 @@ def get_nested_plot_frame(obj, key_map, cached=False): Replaces any HoloMap or DynamicMap in the nested data structure, with the item corresponding to the supplied key. - Args: - obj: Nested Dimensioned object - key_map: Dictionary mapping between dimensions and key value - cached: Whether to allow looking up key in cache + Parameters + ---------- + obj + Nested Dimensioned object + key_map + Dictionary mapping between dimensions and key value + cached + Whether to allow looking up key in cache - Returns: - Nested datastructure where maps are replaced with single frames + Returns + ------- + Nested datastructure where maps are replaced with single frames """ clone = obj.map(lambda x: x) @@ -335,8 +345,9 @@ def get_nested_plot_frame(obj, key_map, cached=False): def undisplayable_info(obj, html=False): - "Generate helpful message regarding an undisplayable object" + """Generate helpful message regarding an undisplayable object + """ collate = 'collate' if html else 'collate' info = "For more information, please consult the Composing Data tutorial (http://git.io/vtIQh)" if isinstance(obj, HoloMap): @@ -357,10 +368,10 @@ def undisplayable_info(obj, html=False): def compute_sizes(sizes, size_fn, scaling_factor, scaling_method, base_size): - """ - Scales point sizes according to a scaling factor, + """Scales point sizes according to a scaling factor, base size and size_fn, which will be applied before scaling. + """ if sizes.dtype.kind not in ('i', 'f'): return None @@ -377,9 +388,9 @@ def compute_sizes(sizes, size_fn, scaling_factor, scaling_method, base_size): def get_axis_padding(padding): - """ - Process a padding value supplied as a tuple or number and returns + """Process a padding value supplied as a tuple or number and returns padding values for x-, y- and z-axis. + """ if isinstance(padding, tuple): if len(padding) == 2: @@ -397,9 +408,9 @@ def get_axis_padding(padding): def get_minimum_span(low, high, span): - """ - If lower and high values are equal ensures they are separated by + """If lower and high values are equal ensures they are separated by the defined span. 
+ """ if is_number(low) and low == high: if isinstance(low, np.datetime64): @@ -409,9 +420,9 @@ def get_minimum_span(low, high, span): def get_range(element, ranges, dimension): - """ - Computes the data, soft- and hard-range along a dimension given + """Computes the data, soft- and hard-range along a dimension given an element and a dictionary of ranges. + """ if dimension and dimension != 'categorical': if ranges and dimension.label in ranges: @@ -428,11 +439,11 @@ def get_range(element, ranges, dimension): def get_sideplot_ranges(plot, element, main, ranges): - """ - Utility to find the range for an adjoined + """Utility to find the range for an adjoined plot given the plot, the element, the Element the plot is adjoined to and the dictionary of ranges. + """ key = plot.current_key dims = element.dimensions() @@ -465,7 +476,9 @@ def get_sideplot_ranges(plot, element, main, ranges): def within_range(range1, range2): - """Checks whether range1 is within the range specified by range2.""" + """Checks whether range1 is within the range specified by range2. + + """ range1 = [r if isfinite(r) else None for r in range1] range2 = [r if isfinite(r) else None for r in range2] return ((range1[0] is None or range2[0] is None or range1[0] >= range2[0]) and @@ -487,7 +500,9 @@ def validate_unbounded_mode(holomaps, dynmaps): def get_dynamic_mode(composite): - "Returns the common mode of the dynamic maps in given composite object" + """Returns the common mode of the dynamic maps in given composite object + + """ dynmaps = composite.traverse(lambda x: x, [DynamicMap]) holomaps = composite.traverse(lambda x: x, ['HoloMap']) dynamic_unbounded = any(m.unbounded for m in dynmaps) @@ -500,8 +515,8 @@ def get_dynamic_mode(composite): def initialize_unbounded(obj, dimensions, key): - """ - Initializes any DynamicMaps in unbounded mode. + """Initializes any DynamicMaps in unbounded mode. + """ select = dict(zip([d.name for d in dimensions], key)) try: @@ -511,9 +526,9 @@ def initialize_unbounded(obj, dimensions, key): def dynamic_update(plot, subplot, key, overlay, items): - """ - Given a plot, subplot and dynamically generated (Nd)Overlay + """Given a plot, subplot and dynamically generated (Nd)Overlay find the closest matching Element for that plot. + """ match_spec = get_overlay_spec(overlay, wrap_tuple(key), @@ -528,9 +543,9 @@ def dynamic_update(plot, subplot, key, overlay, items): def map_colors(arr, crange, cmap, hex=True): - """ - Maps an array of values to RGB hex strings, given + """Maps an array of values to RGB hex strings, given a color range and colormap. + """ if isinstance(crange, arraylike_types): xsorted = np.argsort(crange) @@ -551,8 +566,8 @@ def map_colors(arr, crange, cmap, hex=True): def resample_palette(palette, ncolors, categorical, cmap_categorical): - """ - Resample the number of colors in a palette to the selected number. + """Resample the number of colors in a palette to the selected number. + """ if len(palette) != ncolors: if categorical and cmap_categorical: @@ -565,8 +580,8 @@ def resample_palette(palette, ncolors, categorical, cmap_categorical): def mplcmap_to_palette(cmap, ncolors=None, categorical=False): - """ - Converts a matplotlib colormap to palette of RGB hex strings." + """Converts a matplotlib colormap to palette of RGB hex strings." 
+ """ import matplotlib as mpl from matplotlib.colors import Colormap, ListedColormap @@ -662,8 +677,8 @@ def bokeh_palette_to_palette(cmap, ncolors=None, categorical=False): def linear_gradient(start_hex, finish_hex, n=10): - """ - Interpolates the color gradient between to hex colors + """Interpolates the color gradient between to hex colors + """ s = hex2rgb(start_hex) f = hex2rgb(finish_hex) @@ -675,8 +690,8 @@ def linear_gradient(start_hex, finish_hex, n=10): def polylinear_gradient(colors, n): - """ - Interpolates the color gradients between a list of hex colors. + """Interpolates the color gradients between a list of hex colors. + """ n_out = int(float(n) / (len(colors)-1)) gradient = linear_gradient(colors[0], colors[1], n_out) @@ -696,10 +711,10 @@ def polylinear_gradient(colors, n): def _list_cmaps(provider=None, records=False): - """ - List available colormaps by combining matplotlib, bokeh, and + """List available colormaps by combining matplotlib, bokeh, and colorcet colormaps or palettes if available. May also be narrowed down to a particular provider or list of providers. + """ if provider is None: provider = providers @@ -754,8 +769,7 @@ def info(provider,names): def register_cmaps(category, provider, source, bg, names): - """ - Maintain descriptions of colormaps that include the following information: + """Maintain descriptions of colormaps that include the following information: name - string name for the colormap category - intended use or purpose, mostly following matplotlib @@ -763,6 +777,7 @@ def register_cmaps(category, provider, source, bg, names): source - original source or creator of the colormaps bg - base/background color expected for the map ('light','dark','medium','any' (unknown or N/A)) + """ for name in names: bisect.insort(cmap_info, CMapInfo(name=name, provider=provider, @@ -772,8 +787,8 @@ def register_cmaps(category, provider, source, bg, names): def list_cmaps(provider=None, records=False, name=None, category=None, source=None, bg=None, reverse=None): - """ - Return colormap names matching the specified filters. + """Return colormap names matching the specified filters. + """ # Only uses names actually imported and currently available available = _list_cmaps(provider=provider, records=True) @@ -922,8 +937,8 @@ def list_cmaps(provider=None, records=False, name=None, category=None, source=No def process_cmap(cmap, ncolors=None, provider=None, categorical=False): - """ - Convert valid colormap specifications to a list of colors. + """Convert valid colormap specifications to a list of colors. + """ providers_checked="matplotlib, bokeh, or colorcet" if provider is None else provider @@ -959,27 +974,26 @@ def process_cmap(cmap, ncolors=None, provider=None, categorical=False): def color_intervals(colors, levels, clip=None, N=255): - """ - Maps the supplied colors into bins defined by the supplied levels. + """Maps the supplied colors into bins defined by the supplied levels. If a clip tuple is defined the bins are clipped to the defined range otherwise the range is computed from the levels and returned. 
- Arguments - --------- - colors: list + Parameters + ---------- + colors : list List of colors (usually hex string or named colors) - levels: list or array_like + levels : list or array_like Levels specifying the bins to map the colors to - clip: tuple (optional) + clip : tuple, optional Lower and upper limits of the color range - N: int + N : int Number of discrete colors to map the range onto Returns ------- - cmap: list + cmap : list List of colors - clip: tuple + clip : tuple Lower and upper bounds of the color range """ if len(colors) != len(levels)-1: @@ -1006,16 +1020,16 @@ def color_intervals(colors, levels, clip=None, N=255): def dim_axis_label(dimensions, separator=', '): - """ - Returns an axis label for one or more dimensions. + """Returns an axis label for one or more dimensions. + """ if not isinstance(dimensions, list): dimensions = [dimensions] return separator.join([d.pprint_label for d in dimensions]) def scale_fontsize(size, scaling): - """ - Scales a numeric or string font size. + """Scales a numeric or string font size. + """ ext = None if isinstance(size, str): @@ -1036,8 +1050,8 @@ def scale_fontsize(size, scaling): def attach_streams(plot, obj, precedence=1.1): - """ - Attaches plot refresh to all streams on the object. + """Attaches plot refresh to all streams on the object. + """ def append_refresh(dmap): for stream in get_nested_streams(dmap): @@ -1047,16 +1061,16 @@ def append_refresh(dmap): def traverse_setter(obj, attribute, value): - """ - Traverses the object and sets the supplied attribute on the + """Traverses the object and sets the supplied attribute on the object. Supports Dimensioned and DimensionedPlot types. + """ obj.traverse(lambda x: setattr(x, attribute, value)) def _get_min_distance_numpy(element): - """ - NumPy based implementation of get_min_distance + """NumPy based implementation of get_min_distance + """ xys = element.array([0, 1]) with warnings.catch_warnings(): @@ -1071,9 +1085,9 @@ def _get_min_distance_numpy(element): def get_min_distance(element): - """ - Gets the minimum sampling distance of the x- and y-coordinates + """Gets the minimum sampling distance of the x- and y-coordinates in a grid. + """ try: from scipy.spatial.distance import pdist @@ -1083,9 +1097,9 @@ def get_min_distance(element): def get_directed_graph_paths(element, arrow_length): - """ - Computes paths for a directed path which include an arrow to + """Computes paths for a directed path which include an arrow to indicate the directionality of each edge. + """ edgepaths = element._split_edgepaths edges = edgepaths.split(datatype='array', dimensions=edgepaths.kdims) @@ -1105,8 +1119,8 @@ def get_directed_graph_paths(element, arrow_length): def rgb2hex(rgb): - """ - Convert RGB(A) tuple to hex. + """Convert RGB(A) tuple to hex. + """ if len(rgb) > 3: rgb = rgb[:-1] @@ -1114,8 +1128,8 @@ def rgb2hex(rgb): def dim_range_key(eldim): - """ - Returns the key to look up a dimension range. + """Returns the key to look up a dimension range. 
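For color_intervals, documented above, the number of colors must be one less than the number of levels; a sketch with made-up values:

from holoviews.plotting.util import color_intervals

# Three levels define two bins, so two colors are supplied
cmap, crange = color_intervals(['#000000', '#ffffff'], levels=[0, 5, 10])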
+ """ if isinstance(eldim, dim): dim_name = repr(eldim) @@ -1127,7 +1141,10 @@ def dim_range_key(eldim): def hex2rgb(hex): - ''' "#FFFFFF" -> [255,255,255] ''' + """Convert hex code to RGB integers + "#FFFFFF" -> [255,255,255] + + """ # Pass 16 to the integer function for change of base return [int(hex[i:i+2], 16) for i in range(1,6,2)] @@ -1142,7 +1159,9 @@ class apply_nodata(Operation): that it is transparent (by default) when plotted.""") def _replace_value(self, data): - "Replace `nodata` value in data with NaN, if specified in opts" + """Replace `nodata` value in data with NaN, if specified in opts + + """ data = data.astype('float64') mask = data!=self.p.nodata if hasattr(data, 'where'): @@ -1322,9 +1341,9 @@ def _process(self, element, key=None): class categorical_legend(Operation): - """ - Generates a Points element which contains information for generating + """Generates a Points element which contains information for generating a legend by inspecting the pipeline of a datashaded RGB element. + """ backend = param.String() @@ -1373,12 +1392,12 @@ def _process(self, element, key=None): class flatten_stack(Operation): - """ - Thin wrapper around datashader's shade operation to flatten + """Thin wrapper around datashader's shade operation to flatten ImageStacks into RGB elements. Used for the MPL and Plotly backends because these backends do not natively support ImageStacks, unlike Bokeh. + """ shade_params = param.Dict(default={}, doc=""" diff --git a/holoviews/pyodide.py b/holoviews/pyodide.py index 493f0c19ee..b237897fe9 100644 --- a/holoviews/pyodide.py +++ b/holoviews/pyodide.py @@ -49,9 +49,9 @@ def render_html(obj): return {'text/html': wrap_in_script_tag(script)}, {} def render_image(element, fmt): - """ - Used to render elements to an image format (svg or png) if requested + """Used to render elements to an image format (svg or png) if requested in the display formats. + """ if fmt not in Store.display_formats: return None diff --git a/holoviews/selection.py b/holoviews/selection.py index a5931797a1..eb0bcc8663 100644 --- a/holoviews/selection.py +++ b/holoviews/selection.py @@ -60,8 +60,7 @@ def transform_function(cls, stream_values, constants): ) class _base_link_selections(param.ParameterizedFunction): - """ - Baseclass for linked selection functions. + """Baseclass for linked selection functions. Subclasses override the _build_selection_streams class method to construct a _SelectionStreams namedtuple instance that includes the required streams @@ -70,6 +69,7 @@ class _base_link_selections(param.ParameterizedFunction): Subclasses also override the _expr_stream_updated method. This allows subclasses to control whether new selections override prior selections or whether they are combined with prior selections + """ link_inputs = param.Boolean(default=False, doc=""" @@ -104,9 +104,9 @@ def _update_mode(self, event): self.selection_mode = 'inverse' def _register(self, hvobj): - """ - Register an Element or DynamicMap that may be capable of generating + """Register an Element or DynamicMap that may be capable of generating selection expressions in response to user interaction events + """ from .element import Table @@ -150,9 +150,9 @@ def __call__(self, hvobj, **kwargs): return self._selection_transform(hvobj.clone()) def _selection_transform(self, hvobj, operations=()): - """ - Transform an input HoloViews object into a dynamic object with linked + """Transform an input HoloViews object into a dynamic object with linked selections enabled. 
+ """ from .plotting.util import initialize_dynamic if isinstance(hvobj, DynamicMap): @@ -223,15 +223,14 @@ def compose(*args, **kwargs): @classmethod def _build_selection_streams(cls, inst): - """ - Subclasses should override this method to return a _SelectionStreams + """Subclasses should override this method to return a _SelectionStreams instance + """ raise NotImplementedError() def _expr_stream_updated(self, hvobj, selection_expr, bbox, region_element, **kwargs): - """ - Called when one of the registered HoloViews objects produces a new + """Called when one of the registered HoloViews objects produces a new selection expression. Subclasses should override this method, and they should use the input expression to update the `exprs_stream` property of the _SelectionStreams instance that was produced by @@ -240,16 +239,17 @@ def _expr_stream_updated(self, hvobj, selection_expr, bbox, region_element, **kw Subclasses have the flexibility to control whether the new selection express overrides previous selections, or whether it is combined with previous selections. + """ raise NotImplementedError() class link_selections(_base_link_selections): - """ - Operation which automatically links selections between elements + """Operation which automatically links selections between elements in the supplied HoloViews object. Can be used a single time or be used as an instance to apply the linked selections across multiple objects. + """ cross_filter_mode = param.Selector( @@ -314,16 +314,18 @@ def _update_pipes(self): pipe.event(data=sel_ds.data if raw else sel_ds) def selection_param(self, data): - """ - Returns a parameter which reflects the current selection + """Returns a parameter which reflects the current selection when applied to the supplied data, making it easy to create a callback which depends on the current selection. - Args: - data: A Dataset type or data which can be cast to a Dataset + Parameters + ---------- + data + A Dataset type or data which can be cast to a Dataset - Returns: - A parameter which reflects the current selection + Returns + ------- + A parameter which reflects the current selection """ raw = False if not isinstance(data, Dataset): @@ -334,16 +336,19 @@ def selection_param(self, data): return pipe.param.data def filter(self, data, selection_expr=None): - """ - Filters the provided data based on the current state of the + """Filters the provided data based on the current state of the current selection expression. - Args: - data: A Dataset type or data which can be cast to a Dataset - selection_expr: Optionally provide your own selection expression + Parameters + ---------- + data + A Dataset type or data which can be cast to a Dataset + selection_expr + Optionally provide your own selection expression - Returns: - The filtered data + Returns + ------- + The filtered data """ expr = self.selection_expr if selection_expr is None else selection_expr if expr is None: @@ -452,8 +457,8 @@ def update_colors(*_): @property def unselected_cmap(self): - """ - The datashader colormap for unselected data + """The datashader colormap for unselected data + """ if self.unselected_color is None: return None @@ -461,18 +466,18 @@ def unselected_cmap(self): @property def selected_cmap(self): - """ - The datashader colormap for selected data + """The datashader colormap for selected data + """ return None if self.selected_color is None else _color_to_cmap(self.selected_color) class SelectionDisplay: - """ - Base class for selection display classes. 
Selection display classes are + """Base class for selection display classes. Selection display classes are responsible for transforming an element (or DynamicMap that produces an element) into a HoloViews object that represents the current selection state. + """ def __call__(self, element): @@ -532,9 +537,9 @@ def _select(element, selection_expr, cache=None): class NoOpSelectionDisplay(SelectionDisplay): - """ - Selection display class that returns input element unchanged. For use with + """Selection display class that returns input element unchanged. For use with elements that don't support displaying selections. + """ def build_selection(self, selection_streams, hvobj, operations, region_stream=None, cache=None): @@ -542,9 +547,9 @@ def build_selection(self, selection_streams, hvobj, operations, region_stream=No class OverlaySelectionDisplay(SelectionDisplay): - """ - Selection display base class that represents selections by overlaying + """Selection display base class that represents selections by overlaying colored subsets on top of the original element in an Overlay container. + """ def __init__(self, color_prop='color', is_cmap=False, supports_region=True): @@ -643,9 +648,9 @@ def _style_region_element(self, region_element, unselected_cmap): class ColorListSelectionDisplay(SelectionDisplay): - """ - Selection display class for elements that support coloring by a + """Selection display class for elements that support coloring by a vectorized color list. + """ def __init__(self, color_prop='color', alpha_prop='alpha', backend=None): @@ -691,8 +696,8 @@ def _build_selection(el, colors, alpha, exprs, **kwargs): def _color_to_cmap(color): - """ - Create a light to dark cmap list from a base color + """Create a light to dark cmap list from a base color + """ from .plotting.util import linear_gradient # Lighten start color by interpolating toward white diff --git a/holoviews/streams.py b/holoviews/streams.py index 158bf91401..9266927d5e 100644 --- a/holoviews/streams.py +++ b/holoviews/streams.py @@ -1,7 +1,7 @@ -""" -The streams module defines the streams API that allows visualizations to +"""The streams module defines the streams API that allows visualizations to generate and respond to events, originating either in Python on the server-side or in Javascript in the Jupyter notebook (client-side). + """ import weakref @@ -38,12 +38,12 @@ class _SkipTrigger: pass @contextmanager def triggering_streams(streams): - """ - Temporarily declares the streams as being in a triggered state. + """Temporarily declares the streams as being in a triggered state. Needed by DynamicMap to determine whether to memoize on a Callable, i.e. if a stream has memoization disabled and is in triggered state Callable should disable lookup in the memoization cache. This is done by the dynamicmap_memoization context manager. + """ for stream in streams: stream._triggering = True @@ -55,7 +55,9 @@ def triggering_streams(streams): def streams_list_from_dict(streams): - "Converts a streams dictionary into a streams list" + """Converts a streams dictionary into a streams list + + """ params = {} for k, v in streams.items(): v = param.parameterized.transform_reference(v) @@ -67,8 +69,7 @@ def streams_list_from_dict(streams): class Stream(param.Parameterized): - """ - A Stream is simply a parameterized object with parameters that + """A Stream is simply a parameterized object with parameters that change over time in response to update events and may trigger downstream events on its subscribers. 
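Stepping back to the link_selections operation whose docstrings were converted just above, a minimal usage sketch (element and data are arbitrary):

import holoviews as hv

hv.extension('bokeh')

points = hv.Points([(0, 0), (1, 1), (2, 2)])
ls = hv.link_selections.instance()
linked = ls(points)                # box/lasso selections on `linked` update ls.selection_expr
selected = ls.filter(points.data)  # only the rows matching the current selection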
The Stream parameters can be updated using the update method, which will optionally trigger the @@ -96,6 +97,7 @@ class Stream(param.Parameterized): transform and reset method to preprocess parameters before they are passed to subscribers and reset them using custom logic respectively. + """ # Mapping from a source to a list of streams @@ -110,8 +112,7 @@ class Stream(param.Parameterized): @classmethod def define(cls, name, **kwargs): - """ - Utility to quickly and easily declare Stream classes. Designed + """Utility to quickly and easily declare Stream classes. Designed for interactive use such as notebooks and shouldn't replace parameterized class definitions in source code that is imported. @@ -121,6 +122,7 @@ def define(cls, name, **kwargs): type is inferred and declared, using the value as the default. Supported types: bool, int, float, str, dict, tuple and list + """ params = {'name': param.String(default=name)} for k, v in kwargs.items(): @@ -152,13 +154,13 @@ def define(cls, name, **kwargs): @classmethod def trigger(cls, streams): - """ - Given a list of streams, collect all the stream parameters into + """Given a list of streams, collect all the stream parameters into a dictionary and pass it to the union set of subscribers. Passing multiple streams at once to trigger can be useful when a subscriber may be set multiple times across streams but only needs to be called once. + """ # Union of stream contents items = [stream.contents.items() for stream in set(streams)] @@ -199,14 +201,16 @@ def trigger(cls, streams): def _on_trigger(self): - """Called when a stream has been triggered""" + """Called when a stream has been triggered + + """ @classmethod def _process_streams(cls, streams): - """ - Processes a list of streams promoting Parameterized objects and + """Processes a list of streams promoting Parameterized objects and methods to Param based streams. + """ parameterizeds = defaultdict(set) valid, invalid = [], [] @@ -255,8 +259,7 @@ def _process_streams(cls, streams): def __init__(self, rename=None, source=None, subscribers=None, linked=False, transient=False, **params): - """ - The rename argument allows multiple streams with similar event + """The rename argument allows multiple streams with similar event state to be used by remapping parameter names. Source is an optional argument specifying the HoloViews @@ -265,8 +268,8 @@ def __init__(self, rename=None, source=None, subscribers=None, linked=False, Some streams are configured to automatically link to the source plot, to disable this set linked=False - """ + """ # Source is stored as a weakref to allow it to be garbage collected if subscribers is None: subscribers = [] @@ -298,24 +301,28 @@ def __init__(self, rename=None, source=None, subscribers=None, linked=False, self.registry[source] = [self] def clone(self): - """Return new stream with identical properties and no subscribers""" + """Return new stream with identical properties and no subscribers + + """ return type(self)(**self.contents) @property def subscribers(self): - """Property returning the subscriber list""" + """Property returning the subscriber list + + """ return [s for p, s in sorted(self._subscribers, key=lambda x: x[0])] def clear(self, policy='all'): - """ - Clear all subscribers registered to this stream. + """Clear all subscribers registered to this stream. The default policy of 'all' clears all subscribers. If policy is set to 'user', only subscribers defined by the user are cleared (precedence between zero and one). 
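A short sketch of Stream.define as documented above (the stream name and parameters are invented for illustration):

from holoviews.streams import Stream

# Parameter types are inferred from the defaults (a Number and a String here)
Selection = Stream.define('Selection', threshold=0.5, label='default')

stream = Selection(threshold=0.8)
stream.contents  # {'threshold': 0.8, 'label': 'default'}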
A policy of 'internal' clears subscribers with precedence greater than unity used internally by HoloViews. + """ policies = ['all', 'user', 'internal'] if policy not in policies: @@ -330,8 +337,8 @@ def clear(self, policy='all'): def reset(self): - """ - Resets stream parameters to their defaults. + """Resets stream parameters to their defaults. + """ with util.disable_constant(self): for k, p in self.param.objects('existing').items(): @@ -340,8 +347,7 @@ def reset(self): def add_subscriber(self, subscriber, precedence=0): - """ - Register a callable subscriber to this stream which will be + """Register a callable subscriber to this stream which will be invoked either when event is called or when this stream is passed to the trigger classmethod. @@ -350,6 +356,7 @@ def add_subscriber(self, subscriber, precedence=0): between zero and one while HoloViews itself reserves the use of higher precedence values. Subscribers with high precedence are invoked later than ones with low precedence. + """ if not callable(subscriber): raise TypeError('Subscriber must be a callable.') @@ -368,11 +375,11 @@ def _validate_rename(self, mapping): def rename(self, **mapping): - """ - The rename method allows stream parameters to be allocated to + """The rename method allows stream parameters to be allocated to new names to avoid clashes with other stream parameters of the same name. Returns a new clone of the stream instance with the specified name mapping. + """ params = {k: v for k, v in self.param.values().items() if k != 'name'} return self.__class__(rename=mapping, @@ -405,10 +412,10 @@ def source(self, source): def transform(self): - """ - Method that can be overwritten by subclasses to process the + """Method that can be overwritten by subclasses to process the parameter values before renaming is applied. Returns a dictionary of transformed parameters. + """ return {} @@ -421,38 +428,38 @@ def contents(self): @property def hashkey(self): - """ - The object the memoization hash is computed from. By default + """The object the memoization hash is computed from. By default returns the stream contents but can be overridden to provide a custom hash key. + """ return self.contents def _set_stream_parameters(self, **kwargs): - """ - Sets the stream parameters which are expected to be declared + """Sets the stream parameters which are expected to be declared constant. + """ with util.disable_constant(self): self.param.update(**kwargs) def event(self, **kwargs): - """ - Update the stream parameters and trigger an event. + """Update the stream parameters and trigger an event. + """ skip = self.update(**kwargs) if skip is not _SkipTrigger: self.trigger([self]) def update(self, **kwargs): - """ - The update method updates the stream parameters (without any + """The update method updates the stream parameters (without any renaming applied) in response to some event. If the stream has a custom transform method, this is applied to transform the parameter values accordingly. To update and trigger, use the event method. + """ self._set_stream_parameters(**kwargs) transformed = self.transform() @@ -475,9 +482,9 @@ def __str__(self): class Counter(Stream): - """ - Simple stream that automatically increments an integer counter + """Simple stream that automatically increments an integer counter parameter every time it is updated. + """ counter = param.Integer(default=0, constant=True, bounds=(0, None)) @@ -487,10 +494,10 @@ def transform(self): class Pipe(Stream): - """ - A Stream used to pipe arbitrary data to a callback. 
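The subscriber machinery described above can be exercised like this (the stream class is a hypothetical one declared via Stream.define):

from holoviews.streams import Stream

Threshold = Stream.define('Threshold', value=0.0)
stream = Threshold()
stream.add_subscriber(lambda value: print('value is now', value))
stream.event(value=3.0)  # update(value=3.0) followed by a trigger, notifying the subscriber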
+ """A Stream used to pipe arbitrary data to a callback. Unlike other streams memoization can be disabled for a Pipe stream (and is disabled by default). + """ data = param.Parameter(default=None, constant=True, doc=""" @@ -501,9 +508,9 @@ def __init__(self, data=None, memoize=False, **params): self._memoize_counter = 0 def send(self, data): - """ - A convenience method to send an event with data without + """A convenience method to send an event with data without supplying a keyword. + """ self.event(data=data) @@ -516,8 +523,7 @@ def hashkey(self): class Buffer(Pipe): - """ - Buffer allows streaming and accumulating incoming chunks of rows + """Buffer allows streaming and accumulating incoming chunks of rows from tabular datasets. The data may be in the form of a pandas DataFrame, 2D arrays of rows and columns or dictionaries of column arrays. Buffer will accumulate the last N rows, where N is defined @@ -536,6 +542,7 @@ class Buffer(Pipe): subscribed to this stream will update the axis ranges when an update is pushed. This makes it possible to control whether zooming is allowed while streaming. + """ data = param.Parameter(default=None, constant=True, doc=""" @@ -586,7 +593,9 @@ def __init__(self, data, length=1000, index=True, following=True, **params): def verify(self, x): - """ Verify consistency of dataframes that pass through this stream """ + """Verify consistency of dataframes that pass through this stream + + """ if type(x) != type(self.data): # noqa: E721 raise TypeError(f"Input expected to be of type {type(self.data).__name__}, got {type(x).__name__}.") elif isinstance(x, np.ndarray): @@ -606,7 +615,9 @@ def verify(self, x): def clear(self): - "Clears the data in the stream" + """Clears the data in the stream + + """ if isinstance(self.data, np.ndarray): data = self.data[:, :0] elif isinstance(self.data, pd.DataFrame): @@ -619,9 +630,9 @@ def clear(self): def _concat(self, data): - """ - Concatenate and slice the accepted data types to the defined + """Concatenate and slice the accepted data types to the defined length. + """ if isinstance(data, np.ndarray): data_length = len(data) @@ -660,8 +671,8 @@ def _concat(self, data): def update(self, **kwargs): - """ - Overrides update to concatenate streamed data up to defined length. + """Overrides update to concatenate streamed data up to defined length. + """ data = kwargs.get('data') if data is not None: @@ -678,9 +689,9 @@ def hashkey(self): class Params(Stream): - """ - A Stream that watches the changes in the parameters of the supplied + """A Stream that watches the changes in the parameters of the supplied Parameterized objects and triggers when they change. + """ parameterized = param.ClassSelector(class_=(param.Parameterized, @@ -736,11 +747,14 @@ def unwatch(self): def from_params(cls, params, **kwargs): """Returns Params streams given a dictionary of parameters - Args: - params (dict): Dictionary of parameters + Parameters + ---------- + params : dict + Dictionary of parameters - Returns: - List of Params streams + Returns + ------- + List of Params streams """ key_fn = lambda x: id(x[1].owner) streams = [] @@ -819,10 +833,10 @@ def contents(self): class ParamMethod(Params): - """ - A Stream that watches the parameter dependencies on a method of + """A Stream that watches the parameter dependencies on a method of a parameterized class and triggers when one of the parameters change. 
+ """ parameterized = param.ClassSelector(class_=(param.Parameterized, @@ -847,12 +861,13 @@ def __init__(self, parameterized, parameters=None, watch=True, **params): class Derived(Stream): - """ - A Stream that watches the parameters of one or more input streams and produces + """A Stream that watches the parameters of one or more input streams and produces a result that is a pure function of the input stream values. If exclusive=True, then all streams except the most recently updated are cleared. + """ + def __init__(self, input_streams, exclusive=False, **params): super().__init__(**params) self.input_streams = [] @@ -862,8 +877,8 @@ def __init__(self, input_streams, exclusive=False, **params): self.update() def _register_streams(self, streams): - """ - Register callbacks to watch for changes to input streams + """Register callbacks to watch for changes to input streams + """ for stream in streams: self._register_stream(stream) @@ -891,8 +906,8 @@ def perform_update(stream_index=i, **kwargs): self.input_streams.append(stream) def _unregister_input_streams(self): - """ - Unregister callbacks on input streams and clear input streams list + """Unregister callbacks on input streams and clear input streams list + """ for stream in self.input_streams: stream.source = None @@ -900,20 +915,20 @@ def _unregister_input_streams(self): self.input_streams.clear() def append_input_stream(self, stream): - """ - Add a new input stream + """Add a new input stream + """ self._register_stream(stream) @property def constants(self): - """ - Dict of constants for this instance that should be passed to transform_function + """Dict of constants for this instance that should be passed to transform_function Constant values must not change in response to changes in the values of the input streams. They may, however, change in response to other stream property updates. For example, these values may change if the Stream's source element changes + """ return {} @@ -923,18 +938,20 @@ def transform(self): @classmethod def transform_function(cls, stream_values, constants): - """ - Pure function that transforms input stream param values into the param values + """Pure function that transforms input stream param values into the param values of this Derived stream. - Args: - stream_values: list of dict - Current values of the stream params for each input_stream - constants: dict - Constants as returned by the constants property of an instance of this - stream type. - - Returns: dict + Parameters + ---------- + stream_values : list of dict + Current values of the stream params for each input_stream + constants : dict + Constants as returned by the constants property of an instance of this + stream type. 
+ + Returns + ------- + dict dict of new Stream values where the keys match this stream's params """ raise NotImplementedError @@ -944,9 +961,10 @@ def __del__(self): class History(Stream): + """A Stream that maintains a history of the values of a single input stream + """ - A Stream that maintains a history of the values of a single input stream - """ + values = param.List(constant=True, doc=""" List containing the historical values of the input stream""") @@ -965,8 +983,8 @@ def clear_history(self): del self.values[:] def _register_input_stream(self): - """ - Register callback on input_stream to watch for changes + """Register callback on input_stream to watch for changes + """ def perform_update(**kwargs): self.values.append(kwargs) @@ -1260,10 +1278,10 @@ def transform_function(cls, stream_values, constants): class LinkedStream(Stream): - """ - A LinkedStream indicates is automatically linked to plot interactions + """A LinkedStream indicates is automatically linked to plot interactions on a backend via a Renderer. Not all backends may support dynamically supplying stream data. + """ def __init__(self, linked=True, popup=None, popup_position="top_right", popup_anchor=None, **params): @@ -1280,13 +1298,13 @@ def __init__(self, linked=True, popup=None, popup_position="top_right", popup_an class PointerX(LinkedStream): - """ - A pointer position along the x-axis in data coordinates which may be + """A pointer position along the x-axis in data coordinates which may be a numeric or categorical dimension. With the appropriate plotting backend, this corresponds to the position of the mouse/trackpad cursor. If the pointer is outside the plot bounds, the position is set to None. + """ x = param.ClassSelector(class_=pointer_types, default=None, @@ -1295,15 +1313,14 @@ class PointerX(LinkedStream): class PointerY(LinkedStream): - """ - A pointer position along the y-axis in data coordinates which may be + """A pointer position along the y-axis in data coordinates which may be a numeric or categorical dimension. With the appropriate plotting backend, this corresponds to the position of the mouse/trackpad pointer. If the pointer is outside the plot bounds, the position is set to None. - """ + """ y = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" @@ -1311,13 +1328,13 @@ class PointerY(LinkedStream): class PointerXY(LinkedStream): - """ - A pointer position along the x- and y-axes in data coordinates which + """A pointer position along the x- and y-axes in data coordinates which may numeric or categorical dimensions. With the appropriate plotting backend, this corresponds to the position of the mouse/trackpad pointer. If the pointer is outside the plot bounds, the position values are set to None. + """ x = param.ClassSelector(class_=pointer_types, default=None, @@ -1330,10 +1347,11 @@ class PointerXY(LinkedStream): class Draw(PointerXY): - """ - A series of updating x/y-positions when drawing, together with the + """A series of updating x/y-positions when drawing, together with the current stroke count + """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1347,8 +1365,8 @@ class Draw(PointerXY): stroke is started.""") class SingleTap(PointerXY): - """ - The x/y-position of a single tap or click in data coordinates. + """The x/y-position of a single tap or click in data coordinates. 
+ """ x = param.ClassSelector(class_=pointer_types, default=None, @@ -1360,9 +1378,10 @@ class SingleTap(PointerXY): Pointer position along the y-axis in data coordinates""") class Tap(PointerXY): + """The x/y-position of a tap or click in data coordinates. + """ - The x/y-position of a tap or click in data coordinates. - """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1373,8 +1392,8 @@ class Tap(PointerXY): class MultiAxisTap(LinkedStream): - """ - The x/y-positions of a tap or click in data coordinates. + """The x/y-positions of a tap or click in data coordinates. + """ xs = param.Dict(default=None, constant=True, doc=""" @@ -1385,9 +1404,10 @@ class MultiAxisTap(LinkedStream): class DoubleTap(PointerXY): + """The x/y-position of a double-tap or -click in data coordinates. + """ - The x/y-position of a double-tap or -click in data coordinates. - """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1397,9 +1417,10 @@ class DoubleTap(PointerXY): Pointer position along the y-axis in data coordinates""") class PressUp(PointerXY): + """The x/y position of a mouse pressup event in data coordinates. + """ - The x/y position of a mouse pressup event in data coordinates. - """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1410,7 +1431,9 @@ class PressUp(PointerXY): class PanEnd(PointerXY): """The x/y position of a the end of a pan event in data coordinates. + """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1420,10 +1443,11 @@ class PanEnd(PointerXY): Pointer position along the y-axis in data coordinates""") class MouseEnter(PointerXY): - """ - The x/y-position where the mouse/cursor entered the plot area + """The x/y-position where the mouse/cursor entered the plot area in data coordinates. + """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1434,10 +1458,11 @@ class MouseEnter(PointerXY): class MouseLeave(PointerXY): - """ - The x/y-position where the mouse/cursor entered the plot area + """The x/y-position where the mouse/cursor entered the plot area in data coordinates. + """ + x = param.ClassSelector(class_=pointer_types, default=None, constant=True, doc=""" Pointer position along the x-axis in data coordinates""") @@ -1448,8 +1473,8 @@ class MouseLeave(PointerXY): class PlotSize(LinkedStream): - """ - Returns the dimensions of a plot once it has been displayed. + """Returns the dimensions of a plot once it has been displayed. + """ width = param.Integer(default=None, constant=True, doc="The width of the plot in pixels") @@ -1475,8 +1500,8 @@ class SelectMode(LinkedStream): class RangeXY(LinkedStream): - """ - Axis ranges along x- and y-axis in data coordinates. + """Axis ranges along x- and y-axis in data coordinates. + """ x_range = param.Tuple(default=None, length=2, constant=True, doc=""" @@ -1487,8 +1512,8 @@ class RangeXY(LinkedStream): class RangeX(LinkedStream): - """ - Axis range along x-axis in data coordinates. + """Axis range along x-axis in data coordinates. 
+ """ x_range = param.Tuple(default=None, length=2, constant=True, doc=""" @@ -1500,8 +1525,8 @@ def _set_stream_parameters(self, **kwargs): class RangeY(LinkedStream): - """ - Axis range along y-axis in data coordinates. + """Axis range along y-axis in data coordinates. + """ y_range = param.Tuple(default=None, length=2, constant=True, doc=""" @@ -1513,9 +1538,9 @@ def _set_stream_parameters(self, **kwargs): class BoundsXY(LinkedStream): - """ - A stream representing the bounds of a box selection as an + """A stream representing the bounds of a box selection as an tuple of the left, bottom, right and top coordinates. + """ bounds = param.Tuple(default=None, constant=True, length=4, @@ -1524,9 +1549,9 @@ class BoundsXY(LinkedStream): class Lasso(LinkedStream): - """ - A stream representing a lasso selection in 2D space as a two-column + """A stream representing a lasso selection in 2D space as a two-column array of coordinates. + """ geometry = param.Array(constant=True, doc=""" @@ -1534,10 +1559,10 @@ class Lasso(LinkedStream): class SelectionXY(BoundsXY): - """ - A stream representing the selection along the x-axis and y-axis. + """A stream representing the selection along the x-axis and y-axis. Unlike a BoundsXY stream, this stream returns range or categorical selections. + """ bounds = param.Tuple(default=None, constant=True, length=4, @@ -1556,9 +1581,9 @@ class SelectionXY(BoundsXY): class BoundsX(LinkedStream): - """ - A stream representing the bounds of a box selection as an + """A stream representing the bounds of a box selection as an tuple of the left and right coordinates. + """ boundsx = param.Tuple(default=None, constant=True, length=2, @@ -1567,9 +1592,9 @@ class BoundsX(LinkedStream): class BoundsY(LinkedStream): - """ - A stream representing the bounds of a box selection as an + """A stream representing the bounds of a box selection as an tuple of the bottom and top coordinates. + """ boundsy = param.Tuple(default=None, constant=True, length=2, @@ -1578,8 +1603,8 @@ class BoundsY(LinkedStream): class Selection1D(LinkedStream): - """ - A stream representing a 1D selection of objects by their index. + """A stream representing a 1D selection of objects by their index. + """ index = param.List(default=[], allow_None=True, constant=True, doc=""" @@ -1587,8 +1612,8 @@ class Selection1D(LinkedStream): class PlotReset(LinkedStream): - """ - A stream signalling when a plot reset event has been triggered. + """A stream signalling when a plot reset event has been triggered. + """ resetting = param.Boolean(default=False, constant=True, doc=""" @@ -1599,8 +1624,8 @@ def __init__(self, *args, **params): class CDSStream(LinkedStream): - """ - A Stream that syncs a bokeh ColumnDataSource with python. + """A Stream that syncs a bokeh ColumnDataSource with python. + """ data = param.Dict(constant=True, doc=""" @@ -1611,27 +1636,26 @@ class CDSStream(LinkedStream): class PointDraw(CDSStream): - """ - Attaches a PointDrawTool and syncs the datasource. + """Attaches a PointDrawTool and syncs the datasource. - add: boolean + add : boolean Whether to allow adding new Points - drag: boolean + drag : boolean Whether to enable dragging of Points - empty_value: int/float/string/None + empty_value : int/float/string/None The value to insert on non-position columns when adding a new polygon - num_objects: int + num_objects : int The number of polygons that can be drawn before overwriting the oldest polygon. 
- styles: dict + styles : dict A dictionary specifying lists of styles to cycle over whenever a new Point glyph is drawn. - tooltip: str + tooltip : str An optional tooltip to override the default """ @@ -1671,13 +1695,12 @@ def dynamic(self): class CurveEdit(PointDraw): - """ - Attaches a PointDraw to the plot which allows editing the Curve when selected. + """Attaches a PointDraw to the plot which allows editing the Curve when selected. - style: dict + style : dict A dictionary specifying the style of the vertices. - tooltip: str + tooltip : str An optional tooltip to override the default """ @@ -1697,30 +1720,29 @@ def __init__(self, style=None, tooltip=None, **params): class PolyDraw(CDSStream): - """ - Attaches a PolyDrawTool and syncs the datasource. + """Attaches a PolyDrawTool and syncs the datasource. - drag: boolean + drag : boolean Whether to enable dragging of polygons and paths - empty_value: int/float/string/None + empty_value : int/float/string/None The value to insert on non-position columns when adding a new polygon - num_objects: int + num_objects : int The number of polygons that can be drawn before overwriting the oldest polygon. - show_vertices: boolean + show_vertices : boolean Whether to show the vertices when a polygon is selected - styles: dict + styles : dict A dictionary specifying lists of styles to cycle over whenever a new Poly glyph is drawn. - tooltip: str + tooltip : str An optional tooltip to override the default - vertex_style: dict + vertex_style : dict A dictionary specifying the style options for the vertices. The usual bokeh style options apply, e.g. fill_color, line_alpha, size, etc. @@ -1771,21 +1793,20 @@ def dynamic(self): class FreehandDraw(CDSStream): - """ - Attaches a FreehandDrawTool and syncs the datasource. + """Attaches a FreehandDrawTool and syncs the datasource. - empty_value: int/float/string/None + empty_value : int/float/string/None The value to insert on non-position columns when adding a new polygon - num_objects: int + num_objects : int The number of polygons that can be drawn before overwriting the oldest polygon. - styles: dict + styles : dict A dictionary specifying lists of styles to cycle over whenever a new freehand glyph is drawn. - tooltip: str + tooltip : str An optional tooltip to override the default """ @@ -1827,21 +1848,20 @@ def dynamic(self): class BoxEdit(CDSStream): - """ - Attaches a BoxEditTool and syncs the datasource. + """Attaches a BoxEditTool and syncs the datasource. - empty_value: int/float/string/None + empty_value : int/float/string/None The value to insert on non-position columns when adding a new box - num_objects: int + num_objects : int The number of boxes that can be drawn before overwriting the oldest drawn box. - styles: dict + styles : dict A dictionary specifying lists of styles to cycle over whenever a new box glyph is drawn. - tooltip: str + tooltip : str An optional tooltip to override the default """ @@ -1894,16 +1914,15 @@ def dynamic(self): class PolyEdit(PolyDraw): - """ - Attaches a PolyEditTool and syncs the datasource. + """Attaches a PolyEditTool and syncs the datasource. - shared: boolean + shared : boolean Whether PolyEditTools should be shared between multiple elements - tooltip: str + tooltip : str An optional tooltip to override the default - vertex_style: dict + vertex_style : dict A dictionary specifying the style options for the vertices. The usual bokeh style options apply, e.g. fill_color, line_alpha, size, etc. 
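The Pipe and Buffer streams documented above push data into subscriber callbacks, with Buffer additionally accumulating the last N rows. A minimal sketch of that pattern, assuming the bokeh plotting extension is installed and using made-up column names and lengths:

    import numpy as np
    import pandas as pd
    import holoviews as hv
    from holoviews.streams import Buffer

    hv.extension('bokeh')

    # Buffer keeps the last `length` rows pushed through send().
    stream = Buffer(pd.DataFrame({'x': np.array([]), 'y': np.array([])}), length=100)

    # The DynamicMap callback is re-run with the accumulated data on every update.
    dmap = hv.DynamicMap(hv.Curve, streams=[stream])

    # Each send() call appends a chunk of rows and triggers the stream's subscribers.
    for i in range(5):
        chunk = pd.DataFrame({'x': np.arange(i * 10, (i + 1) * 10),
                              'y': np.random.randn(10)})
        stream.send(chunk)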
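The linked streams and drawing tools above work in the opposite direction, reflecting plot interactions back into Python. A rough sketch along the same lines, again assuming the bokeh backend; the element, tools and styling are illustrative choices:

    import holoviews as hv
    from holoviews import streams

    hv.extension('bokeh')

    points = hv.Points([(0, 0), (1, 1), (2, 0), (3, 2)]).opts(tools=['tap'])

    # Selection1D reports the indices of the tapped/selected glyphs back to Python.
    sel = streams.Selection1D(source=points)

    def selected_points(index):
        # `index` is the list of selected row indices (empty when nothing is selected).
        return points.iloc[index].opts(color='red', size=10)

    overlay = points * hv.DynamicMap(selected_points, streams=[sel])

    # PointDraw attaches a PointDrawTool and syncs added/moved points into draw.data.
    draw = streams.PointDraw(source=points, num_objects=10)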
diff --git a/holoviews/util/__init__.py b/holoviews/util/__init__.py index f0e7e1251b..095ba758cf 100644 --- a/holoviews/util/__init__.py +++ b/holoviews/util/__init__.py @@ -33,8 +33,8 @@ def examples(path='holoviews-examples', verbose=False, force=False, root=__file__): - """ - Copies the notebooks to the supplied path. + """Copies the notebooks to the supplied path. + """ filepath = os.path.abspath(os.path.dirname(root)) example_dir = os.path.join(filepath, './examples') @@ -54,9 +54,9 @@ def examples(path='holoviews-examples', verbose=False, force=False, root=__file_ class OptsMeta(param.parameterized.ParameterizedMetaclass): - """ - Improve error message when running something - like: 'hv.opts.Curve()' without a plotting backend. + """Improve error message when running something + like: 'hv.opts.Curve()' without a plotting backend. + """ def __getattr__(self, attr): @@ -71,8 +71,7 @@ def __getattr__(self, attr): class opts(param.ParameterizedFunction, metaclass=OptsMeta): - """ - Utility function to set options at the global level or to provide an + """Utility function to set options at the global level or to provide an Options object that can be used with the .options method of an element or container. @@ -97,6 +96,7 @@ class opts(param.ParameterizedFunction, metaclass=OptsMeta): curve.options(opts.Curve(color='red')) The options method also accepts lists of Option objects. + """ __original_docstring__ = None @@ -122,7 +122,9 @@ def __call__(self, *args, **params): @classmethod def _group_kwargs_to_options(cls, obj, kwargs): - "Format option group kwargs into canonical options format" + """Format option group kwargs into canonical options format + + """ groups = Options._option_groups if set(kwargs.keys()) - set(groups): raise Exception("Keyword options {} must be one of {}".format(groups, @@ -157,7 +159,9 @@ def _group_kwargs_to_options(cls, obj, kwargs): @classmethod def _apply_groups_to_backend(cls, obj, options, backend, clone): - "Apply the groups to a single specified backend" + """Apply the groups to a single specified backend + + """ obj_handle = obj if options is None: if clone: @@ -172,8 +176,9 @@ def _apply_groups_to_backend(cls, obj, options, backend, clone): @classmethod def _grouped_backends(cls, options, backend): - "Group options by backend and filter out output group appropriately" + """Group options by backend and filter out output group appropriately + """ if options is None: return [(backend or Store.current_backend, options)] dfltdict = defaultdict(dict) @@ -213,22 +218,27 @@ def apply_groups(cls, obj, options=None, backend=None, clone=True, **kwargs): If no opts are supplied all options on the object will be reset. - Args: - options (dict): Options specification - Options specification should be indexed by - type[.group][.label] or option type ('plot', 'style', - 'norm'). - backend (optional): Backend to apply options to - Defaults to current selected backend - clone (bool, optional): Whether to clone object - Options can be applied inplace with clone=False - **kwargs: Keywords of options by type - Applies options directly to the object by type - (e.g. 'plot', 'style', 'norm') specified as - dictionaries. - - Returns: - Returns the object or a clone with the options applied + Parameters + ---------- + options : dict + Options specification + Options specification should be indexed by + type[.group][.label] or option type ('plot', 'style', + 'norm').
+ backend : optional + Backend to apply options to + Defaults to current selected backend + clone : bool, optional + Whether to clone object + Options can be applied inplace with clone=False + **kwargs: Keywords of options by type + Applies options directly to the object by type + (e.g. 'plot', 'style', 'norm') specified as + dictionaries. + + Returns + ------- + Returns the object or a clone with the options applied """ if isinstance(options, str): from ..util.parser import OptsSpec @@ -275,9 +285,12 @@ def defaults(cls, *options, **kwargs): Set default options for a session. whether in a Python script or a Jupyter notebook. - Args: - *options: Option objects used to specify the defaults. - backend: The plotting extension the options apply to + Parameters + ---------- + *options + Option objects used to specify the defaults. + backend + The plotting extension the options apply to """ if kwargs and len(kwargs) != 1 and next(iter(kwargs.keys())) != 'backend': raise Exception('opts.defaults only accepts "backend" keyword argument') @@ -288,9 +301,9 @@ def defaults(cls, *options, **kwargs): @classmethod def _expand_by_backend(cls, options, backend): - """ - Given a list of flat Option objects which may or may not have + """Given a list of flat Option objects which may or may not have 'backend' in their kwargs, return a list of grouped backend + """ groups = defaultdict(list) used_fallback = False @@ -314,16 +327,15 @@ def _expand_by_backend(cls, options, backend): @classmethod def _expand_options(cls, options, backend=None): - """ - Validates and expands a dictionaries of options indexed by + """Validates and expands a dictionaries of options indexed by type[.group][.label] keys into separate style, plot, norm and output options. If the backend is not loaded, ``None`` is returned. opts._expand_options({'Image': dict(cmap='viridis', show_title=False)}) - returns - - {'Image': {'plot': dict(show_title=False), 'style': dict(cmap='viridis')}} + Returns + ------- + {'Image': {'plot': dict(show_title=False), 'style': dict(cmap='viridis')}} """ current_backend = Store.current_backend @@ -371,9 +383,9 @@ def _expand_options(cls, options, backend=None): @classmethod def _options_error(cls, opt, objtype, backend, valid_options): - """ - Generates an error message for an invalid option suggesting + """Generates an error message for an invalid option suggesting similar options through fuzzy matching. + """ current_backend = Store.current_backend loaded_backends = Store.loaded_backends() @@ -415,12 +427,12 @@ def _options_error(cls, opt, objtype, backend, valid_options): @classmethod def _builder_reprs(cls, options, namespace=None, ns=None): - """ - Given a list of Option objects (such as those returned from + """Given a list of Option objects (such as those returned from OptsSpec.parse_options) or an %opts or %%opts magic string, return a list of corresponding option builder reprs. The namespace is typically given as 'hv' if fully qualified namespaces are desired. 
+ """ if isinstance(options, str): from .parser import OptsSpec @@ -500,7 +512,9 @@ def builder(cls, spec=None, **kws): @classmethod def _element_keywords(cls, backend, elements=None): - "Returns a dictionary of element names to allowed keywords" + """Returns a dictionary of element names to allowed keywords + + """ if backend not in Store.loaded_backends(): return {} @@ -545,8 +559,7 @@ def _update_backend(cls, backend): class output(param.ParameterizedFunction): - """ - Utility function to set output either at the global level or on a + """Utility function to set output either at the global level or on a specific object. To set output globally use: @@ -576,6 +589,7 @@ class output(param.ParameterizedFunction): These two modes are equivalent to the IPython output line magic and the cell magic respectively. + """ def __init__(self, *args, **kwargs): @@ -636,8 +650,8 @@ def display_fn(obj, renderer): def renderer(name): - """ - Helper utility to access the active renderer for a given extension. + """Helper utility to access the active renderer for a given extension. + """ try: if name not in Store.renderers: @@ -655,8 +669,7 @@ def renderer(name): class extension(_pyviz_extension): - """ - Helper utility used to load holoviews extensions. These can be + """Helper utility used to load holoviews extensions. These can be plotting extensions, element extensions or anything else that can be registered to work with HoloViews. @@ -750,7 +763,9 @@ def __call__(self, *args, **params): @classmethod def register_backend_callback(cls, backend, callback): - """Registers a hook which is run when a backend is loaded""" + """Registers a hook which is run when a backend is loaded + + """ cls._backend_hooks[backend].append(callback) def _ignore_bokeh_warnings(self): @@ -761,8 +776,7 @@ def _ignore_bokeh_warnings(self): def save(obj, filename, fmt='auto', backend=None, resources='cdn', toolbar=None, title=None, **kwargs): - """ - Saves the supplied object to file. + """Saves the supplied object to file. The available output formats depend on the backend being used. By default and if the filename is a string the output format will be @@ -773,27 +787,27 @@ def save(obj, filename, fmt='auto', backend=None, resources='cdn', toolbar=None, default to fmt='widgets', which may be changed to scrubber widgets using fmt='scrubber'. - Arguments - --------- - obj: HoloViews object + Parameters + ---------- + obj : HoloViews object The HoloViews object to save to file - filename: string or IO object + filename : string or IO object The filename or BytesIO/StringIO object to save to - fmt: string + fmt : string The format to save the object as, e.g. png, svg, html, or gif and if widgets are desired either 'widgets' or 'scrubber' - backend: string + backend : string A valid HoloViews rendering backend, e.g. bokeh or matplotlib - resources: string or bokeh.resource.Resources + resources : string or bokeh.resource.Resources Bokeh resources used to load bokehJS components. Defaults to CDN, to embed resources inline for offline usage use 'inline' or bokeh.resources.INLINE. - toolbar: bool or None + toolbar : bool or None Whether to include toolbars in the exported plot. If None, display the toolbar unless fmt is `png` and backend is `bokeh`. If `True`, always include the toolbar. If `False`, do not include the toolbar. 
- title: string + title : string Custom title for exported HTML file **kwargs: dict Additional keyword arguments passed to the renderer, @@ -826,8 +840,7 @@ def save(obj, filename, fmt='auto', backend=None, resources='cdn', toolbar=None, def render(obj, backend=None, **kwargs): - """ - Renders the HoloViews object to the corresponding object in the + """Renders the HoloViews object to the corresponding object in the specified backend, e.g. a Matplotlib or Bokeh figure. The backend defaults to the currently declared default @@ -838,11 +851,11 @@ def render(obj, backend=None, **kwargs): you can use like any hand-constructed Bokeh figure in a Bokeh layout. - Arguments - --------- - obj: HoloViews object + Parameters + ---------- + obj : HoloViews object The HoloViews object to render - backend: string + backend : string A valid HoloViews rendering backend **kwargs: dict Additional keyword arguments passed to the renderer, @@ -850,7 +863,7 @@ def render(obj, backend=None, **kwargs): Returns ------- - rendered: + rendered : The rendered representation of the HoloViews object, e.g. if backend='matplotlib' a matplotlib Figure or FuncAnimation """ @@ -866,8 +879,7 @@ def render(obj, backend=None, **kwargs): class Dynamic(param.ParameterizedFunction): - """ - Dynamically applies a callable to the Elements in any HoloViews + """Dynamically applies a callable to the Elements in any HoloViews object. Will return a DynamicMap wrapping the original map object, which will lazily evaluate when a key is requested. By default Dynamic applies a no-op, making it useful for converting HoloMaps @@ -879,6 +891,7 @@ class Dynamic(param.ParameterizedFunction): decorated with parameter dependencies Dynamic will automatically create a stream to watch the parameter changes. This default behavior may be disabled by setting watch=False. + """ operation = param.Callable(default=lambda x: x, doc=""" @@ -932,12 +945,12 @@ def __call__(self, map_obj, **params): def _get_streams(self, map_obj, watch=True): - """ - Generates a list of streams to attach to the returned DynamicMap. + """Generates a list of streams to attach to the returned DynamicMap. If the input is a DynamicMap any streams that are supplying values for the key dimension of the input are inherited. And the list of supplied stream classes and instances are processed and added to the list. + """ from panel.widgets.base import Widget @@ -1029,9 +1042,9 @@ def _process(self, element, key=None, kwargs=None): return self.p.operation(element, **kwargs) def _dynamic_operation(self, map_obj): - """ - Generate function to dynamically apply the operation. + """Generate function to dynamically apply the operation. Wraps an existing HoloMap or DynamicMap. + """ def resolve(key, kwargs): if not isinstance(map_obj, HoloMap): @@ -1064,9 +1077,9 @@ def dynamic_operation(*key, **kwargs): def _make_dynamic(self, hmap, dynamic_fn, streams): - """ - Accepts a HoloMap and a dynamic callback function creating + """Accepts a HoloMap and a dynamic callback function creating an equivalent DynamicMap from the HoloMap. + """ if isinstance(hmap, ViewableElement): dmap = DynamicMap(dynamic_fn, streams=streams) diff --git a/holoviews/util/command.py b/holoviews/util/command.py index f1717e6234..9b82b7c1b8 100755 --- a/holoviews/util/command.py +++ b/holoviews/util/command.py @@ -1,8 +1,8 @@ #! 
/usr/bin/env python -""" -python -m holoviews.util.command Conversion_Example.ipynb +"""python -m holoviews.util.command Conversion_Example.ipynb OR holoviews Conversion_Example.ipynb + """ import argparse diff --git a/holoviews/util/locator.py b/holoviews/util/locator.py index 6d9083220c..0c75093de5 100644 --- a/holoviews/util/locator.py +++ b/holoviews/util/locator.py @@ -1,7 +1,7 @@ -""" -Minimal set of functionality of Matplotlib's MaxNLocator to choose contour +"""Minimal set of functionality of Matplotlib's MaxNLocator to choose contour levels without having to have Matplotlib installed. Taken from Matplotlib 3.8.0. + """ import math @@ -9,12 +9,13 @@ class _Edge_integer: - """ - Helper for `.MaxNLocator`, `.MultipleLocator`, etc. + """Helper for `.MaxNLocator`, `.MultipleLocator`, etc. Take floating-point precision limitations into account when calculating tick locations as integer multiples of a step. + """ + def __init__(self, step, offset): """ Parameters @@ -41,14 +42,18 @@ def closeto(self, ms, edge): return abs(ms - edge) < tol def le(self, x): - """Return the largest n: n*step <= x.""" + """Return the largest n: n*step <= x. + + """ d, m = divmod(x, self.step) if self.closeto(m / self.step, 1): return d + 1 return d def ge(self, x): - """Return the smallest n: n*step >= x.""" + """Return the smallest n: n*step >= x. + + """ d, m = divmod(x, self.step) if self.closeto(m / self.step, 0): return d @@ -56,8 +61,7 @@ def nonsingular(vmin, vmax, expander=0.001, tiny=1e-15, increasing=True): - """ - Modify the endpoints of a range as needed to avoid singularities. + """Modify the endpoints of a range as needed to avoid singularities. Parameters ---------- @@ -82,7 +86,6 @@ def nonsingular(vmin, vmax, expander=0.001, tiny=1e-15, increasing=True): If either input is inf or NaN, or if both inputs are 0 or very close to zero, it returns -*expander*, *expander*. """ - if (not np.isfinite(vmin)) or (not np.isfinite(vmax)): return -expander, expander diff --git a/holoviews/util/parser.py b/holoviews/util/parser.py index 6d7ec7906e..19afcf3a6b 100644 --- a/holoviews/util/parser.py +++ b/holoviews/util/parser.py @@ -1,5 +1,4 @@ -""" -The magics offered by the HoloViews IPython extension are powerful and +"""The magics offered by the HoloViews IPython extension are powerful and support rich, compositional specifications. To avoid the the brittle, convoluted code that results from trying to support the syntax in pure Python, this file defines suitable parsers using pyparsing that are @@ -7,6 +6,7 @@ Pyparsing is required by matplotlib and will therefore be available if HoloViews is being used in conjunction with matplotlib. + """ from itertools import groupby @@ -30,9 +30,9 @@ class ParserWarning(param.Parameterized):pass parsewarning = ParserWarning(name='Warning') class Parser: - """ - Base class for magic line parsers, designed for forgiving parsing + """Base class for magic line parsers, designed for forgiving parsing of keyword lists. + """ # Static namespace set in __init__.py of the extension @@ -42,7 +44,9 @@ class Parser: @classmethod def _strip_commas(cls, kw): - "Strip out any leading/training commas from the token" + """Strip out any leading/trailing commas from the token + + """ kw = kw[:-1] if kw[-1]==',' else kw return kw[1:] if kw[0]==',' else kw @@ -61,8 +63,8 @@ def recurse_token(cls, token, inner): @classmethod def collect_tokens(cls, parseresult, mode): - """ - Collect the tokens from a (potentially) nested parse result.
+ """Collect the tokens from a (potentially) nested parse result. + """ inner = '(%s)' if mode=='parens' else '[%s]' if parseresult is None: return [] @@ -79,12 +81,12 @@ def collect_tokens(cls, parseresult, mode): @classmethod def todict(cls, parseresult, mode='parens', ns=None): - """ - Helper function to return dictionary given the parse results + """Helper function to return dictionary given the parse results from a pyparsing.nestedExpr object (containing keywords). The ns is a dynamic namespace (typically the IPython Notebook namespace) used to update the class-level namespace. + """ if ns is None: ns = {} @@ -124,8 +126,7 @@ def todict(cls, parseresult, mode='parens', ns=None): class OptsSpec(Parser): - """ - An OptsSpec is a string specification that describes an + """An OptsSpec is a string specification that describes an OptionTree. It is a list of tree path specifications (using dotted syntax) separated by keyword lists for any of the style, plotting or normalization options. These keyword lists are denoted @@ -147,6 +148,7 @@ class OptsSpec(Parser): optional and additional spaces are often allowed. The only restriction is that keywords *must* be immediately followed by the '=' sign (no space). + """ plot_options_short = pp.nestedExpr('[', @@ -219,10 +221,10 @@ class OptsSpec(Parser): @classmethod def process_normalization(cls, parse_group): - """ - Given a normalization parse group (i.e. the contents of the + """Given a normalization parse group (i.e. the contents of the braces), validate the option list and compute the appropriate integer value for the normalization plotting option. + """ if ('norm_options' not in parse_group): return None opts = parse_group['norm_options'][0].asList() @@ -260,14 +262,14 @@ def process_normalization(cls, parse_group): @classmethod def _group_paths_without_options(cls, line_parse_result): - """ - Given a parsed options specification as a list of groups, combine + """Given a parsed options specification as a list of groups, combine groups without options with the first subsequent group which has options. A line of the form 'A B C [opts] D E [opts_2]' results in [({A, B, C}, [opts]), ({D, E}, [opts_2])] + """ active_pathspecs = set() for group in line_parse_result: @@ -290,7 +292,9 @@ def _group_paths_without_options(cls, line_parse_result): @classmethod def apply_deprecations(cls, path): - "Convert any potentially deprecated paths and issue appropriate warnings" + """Convert any potentially deprecated paths and issue appropriate warnings + + """ split = path.split('.') msg = 'Element {old} deprecated. Use {new} instead.' for old, new in cls.deprecations: @@ -302,9 +306,9 @@ def apply_deprecations(cls, path): @classmethod def parse(cls, line, ns=None): - """ - Parse an options specification, returning a dictionary with + """Parse an options specification, returning a dictionary with path keys and {'plot':, 'style':} values. + """ if ns is None: ns = {} @@ -349,9 +353,9 @@ def parse(cls, line, ns=None): @classmethod def parse_options(cls, line, ns=None): - """ - Similar to parse but returns a list of Options objects instead + """Similar to parse but returns a list of Options objects instead of the dictionary format. 
+ """ if ns is None: ns = {} @@ -369,8 +373,7 @@ class CompositorSpec(Parser): - """ - The syntax for defining a set of compositor is as follows: + """The syntax for defining a set of compositors is as follows: [ mode op(spec) [settings] value ]+ @@ -383,6 +386,7 @@ class CompositorSpec(Parser): dotted path specifications. settings : Optional list of keyword arguments to be used as parameters to the operation (in square brackets). + """ mode = pp.Word(pp.alphas+pp.nums+'_').setResultsName("mode") @@ -407,8 +411,8 @@ class CompositorSpec(Parser): @classmethod def parse(cls, line, ns=None): - """ - Parse compositor specifications, returning a list Compositors + """Parse compositor specifications, returning a list of Compositors + """ if ns is None: ns = {} diff --git a/holoviews/util/settings.py b/holoviews/util/settings.py index 7b4bc16a9a..6694e01fa8 100644 --- a/holoviews/util/settings.py +++ b/holoviews/util/settings.py @@ -4,10 +4,11 @@ class KeywordSettings: - """ - Base class for options settings used to specified collections of + """Base class for options settings used to specify collections of keyword options. + """ + # Dictionary from keywords to allowed bounds/values allowed = {} defaults = dict([]) # Default keyword values. @@ -22,14 +23,16 @@ class KeywordSettings: @classmethod def update_options(cls, options, items): - """ - Allows updating options depending on class attributes + """Allows updating options depending on class attributes and unvalidated options. + """ @classmethod def get_options(cls, items, options, warnfn): - "Given a keyword specification, validate and compute options" + """Given a keyword specification, validate and compute options + + """ options = cls.update_options(options, items) for keyword in cls.defaults: if keyword in items: @@ -63,14 +66,16 @@ def get_options(cls, items, options, warnfn): @classmethod def _validate(cls, options, items, warnfn): - "Allows subclasses to check options are valid." + """Allows subclasses to check options are valid. + + """ raise NotImplementedError("KeywordSettings is an abstract base class.") @classmethod def extract_keywords(cls, line, items): - """ - Given the keyword string, parse a dictionary of options. + """Given the keyword string, parse a dictionary of options. + """ unprocessed = list(reversed(line.split('='))) while unprocessed: @@ -112,9 +117,9 @@ def list_backends(): def list_formats(format_type, backend=None): - """ - Returns list of supported formats for a particular + """Returns list of supported formats for a particular backend. + """ if backend is None: backend = Store.current_backend @@ -131,8 +136,8 @@ def list_formats(format_type, backend=None): class OutputSettings(KeywordSettings): - """ - Class for controlling display and output settings. + """Class for controlling display and output settings. + """ # Lists: strict options, Set: suggested options, Tuple: numeric bounds.
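The OptsSpec parser described above backs the %opts/%%opts magic syntax. A small sketch of its two classmethods, assuming a plotting backend has been loaded and using an illustrative spec string:

    import holoviews as hv
    from holoviews.util.parser import OptsSpec

    hv.extension('bokeh')

    spec = "Curve [width=400] (color='red' line_width=2)"

    # parse() returns a dictionary keyed by path with 'plot'/'style' option groups.
    parsed = OptsSpec.parse(spec)

    # parse_options() returns the same specification as a flat list of Options objects.
    option_list = OptsSpec.parse_options(spec)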
@@ -244,8 +249,9 @@ def _generate_signature(cls): @classmethod def _validate(cls, options, items, warnfn): - "Validation of edge cases and incompatible options" + """Validation of edge cases and incompatible options + """ if 'html' in Store.display_formats: pass elif 'fig' in items and items['fig'] not in Store.display_formats: @@ -337,9 +343,9 @@ def output(cls, line=None, cell=None, cell_runner=None, @classmethod def update_options(cls, options, items): - """ - Switch default options and backend if new backend is supplied in + """Switch default options and backend if new backend is supplied in items. + """ # Get new backend backend_spec = items.get('backend', Store.current_backend) @@ -408,8 +414,8 @@ def set_backend(cls, backend): @classmethod def _set_render_options(cls, options, backend=None): - """ - Set options on current Renderer. + """Set options on current Renderer. + """ if backend: backend = backend.split(':')[0] diff --git a/holoviews/util/transform.py b/holoviews/util/transform.py index 8c95d3d4ae..4af749424d 100644 --- a/holoviews/util/transform.py +++ b/holoviews/util/transform.py @@ -37,13 +37,17 @@ def norm(values, min=None, max=None): (values - min) / (max - min) - Args: - values: Array of values to be normalized - min (float, optional): Lower bound of normalization range - max (float, optional): Upper bound of normalization range - - Returns: - Array of normalized values + Parameters + ---------- + values : Array of values to be normalized + min : float, optional + Lower bound of normalization range + max : float, optional + Upper bound of normalization range + + Returns + ------- + Array of normalized values """ min = np.min(values) if min is None else min max = np.max(values) if max is None else max @@ -54,13 +58,17 @@ def lognorm(values, min=None, max=None): """Unity-based normalization on log scale. Apply the same transformation as matplotlib.colors.LogNorm - Args: - values: Array of values to be normalized - min (float, optional): Lower bound of normalization range - max (float, optional): Upper bound of normalization range - - Returns: - Array of normalized values + Parameters + ---------- + values : Array of values to be normalized + min : float, optional + Lower bound of normalization range + max : float, optional + Upper bound of normalization range + + Returns + ------- + Array of normalized values """ min = np.log(np.min(values)) if min is None else np.log(min) max = np.log(np.max(values)) if max is None else np.log(max) @@ -69,6 +77,7 @@ def lognorm(values, min=None, max=None): class iloc: """Implements integer array indexing for dim expressions. + """ __name__ = 'iloc' @@ -90,6 +99,7 @@ def __call__(self, values): class loc: """Implements loc for dim expressions. + """ __name__ = 'loc' @@ -114,14 +124,16 @@ def bin(values, bins, labels=None): with bin center values but an explicit list of bin labels may be defined. 
- Args: - values: Array of values to be binned - bins: List or array containing the bin boundaries - labels: List of labels to assign to each bin - If the bins are length N the labels should be length N-1 + Parameters + ---------- + values : Array of values to be binned + bins : List or array containing the bin boundaries + labels : List of labels to assign to each bin + If the bins are length N the labels should be length N-1 - Returns: - Array of binned values + Returns + ------- + Array of binned values """ bins = np.asarray(bins) if labels is None: @@ -143,13 +155,15 @@ def categorize(values, categories, default=None): Replaces discrete values in input array with a fixed set of categories defined either as a list or dictionary. - Args: - values: Array of values to be categorized - categories: List or dict of categories to map inputs to - default: Default value to assign if value not in categories + Parameters + ---------- + values : Array of values to be categorized + categories : List or dict of categories to map inputs to + default : Default value to assign if value not in categories - Returns: - Array of categorized values + Returns + ------- + Array of categorized values """ uniq_cats = list(unique_iterator(values)) cats = [] @@ -189,11 +203,11 @@ def _python_isin(array, values): class dim: - """ - dim transform objects are a way to express deferred transforms on + """dim transform objects are a way to express deferred transforms on Datasets. dim transforms support all mathematical and bitwise operators, NumPy ufuncs and methods, and provide a number of useful methods for normalizing, binning and categorizing data. + """ _binary_funcs = { @@ -324,9 +338,9 @@ def __hash__(self): return hash(repr(self)) def clone(self, dimension=None, ops=None, dim_type=None): - """ - Creates a clone of the dim expression optionally overriding + """Creates a clone of the dim expression optionally overriding the dim and ops. + """ dim_type = dim_type or type(self) if dimension is None: @@ -339,9 +353,9 @@ def clone(self, dimension=None, ops=None, dim_type=None): @classmethod def register(cls, key, function): - """ - Register a custom dim transform function which can from then + """Register a custom dim transform function which can from then on be referenced by the key. + """ cls._custom_funcs[key] = function @@ -489,10 +503,11 @@ def bin(self, bins, labels=None): either computed from each bins center point or from the supplied labels. - Args: - bins: List or array containing the bin boundaries - labels: List of labels to assign to each bin - If the bins are length N the labels should be length N-1 + Parameters + ---------- + bins : List or array containing the bin boundaries + labels : List of labels to assign to each bin + If the bins are length N the labels should be length N-1 """ return type(self)(self, bin, bins, labels=labels) @@ -502,9 +517,10 @@ def categorize(self, categories, default=None): Replaces discrete values in input array into a fixed set of categories defined either as a list or dictionary. - Args: - categories: List or dict of categories to map inputs to - default: Default value to assign if value not in categories + Parameters + ---------- + categories : List or dict of categories to map inputs to + default : Default value to assign if value not in categories """ return type(self)(self, categorize, categories=categories, default=default) @@ -512,8 +528,9 @@ def lognorm(self, limits=None): """Unity-based normalization log scale. 
Apply the same transformation as matplotlib.colors.LogNorm - Args: - limits: tuple of (min, max) defining the normalization range + Parameters + ---------- + limits : tuple of (min, max) defining the normalization range """ kwargs = {} if limits is not None: @@ -525,8 +542,9 @@ def norm(self, limits=None): (values - min) / (max - min) - Args: - limits: tuple of (min, max) defining the normalization range + Parameters + ---------- + limits : tuple of (min, max) defining the normalization range """ kwargs = {} if limits is not None: @@ -535,10 +553,10 @@ def norm(self, limits=None): @classmethod def pipe(cls, func, *args, **kwargs): - """ - Wrapper to give multidimensional transforms a more intuitive syntax. + """Wrapper to give multidimensional transforms a more intuitive syntax. For a custom function 'func' with signature (*args, **kwargs), call as dim.pipe(func, *args, **kwargs). + """ args = list(args) # make mutable for k, arg in enumerate(args): @@ -548,16 +566,18 @@ def pipe(cls, func, *args, **kwargs): @property def str(self): - "Casts values to strings or provides str accessor." + """Casts values to strings or provides str accessor. + + """ return type(self)(self, 'str', accessor=True) # Other methods def applies(self, dataset, strict=False): - """ - Determines whether the dim transform can be applied to the + """Determines whether the dim transform can be applied to the Dataset, i.e. whether all referenced dimensions can be resolved. + """ from ..element import Graph @@ -656,18 +676,18 @@ def _apply_fn(self, dataset, data, fn, fn_name, args, kwargs, accessor, drange): return data def _compute_data(self, data, drop_index, compute): - """ - Implements conversion of data from namespace specific object, + """Implements conversion of data from namespace specific object, e.g. pandas Series to NumPy array. + """ if hasattr(data, 'compute') and compute: data = data.compute() return data def _coerce(self, data): - """ - Implements coercion of data from current data format to the + """Implements coercion of data from current data format to the namespace specific datatype. + """ return data @@ -675,24 +695,35 @@ def apply(self, dataset, flat=False, expanded=None, ranges=None, all_values=Fals keep_index=False, compute=True, strict=False): """Evaluates the transform on the supplied dataset. - Args: - dataset: Dataset object to evaluate the expression on - flat: Whether to flatten the returned array - expanded: Whether to use the expanded expand values - ranges: Dictionary for ranges for normalization - all_values: Whether to evaluate on all values - Whether to evaluate on all available values, for some - element types, such as Graphs, this may include values - not included in the referenced column - keep_index: For data types that support indexes, whether the index - should be preserved in the result. - compute: For data types that support lazy evaluation, whether - the result should be computed before it is returned. 
- strict: Whether to strictly check for dimension matches - (if False, counts any dimensions with matching names as the same) - - Returns: - values: NumPy array computed by evaluating the expression + Parameters + ---------- + dataset + Dataset object to evaluate the expression on + flat + Whether to flatten the returned array + expanded + Whether to use the expanded expand values + ranges + Dictionary for ranges for normalization + all_values + Whether to evaluate on all values + Whether to evaluate on all available values, for some + element types, such as Graphs, this may include values + not included in the referenced column + keep_index + For data types that support indexes, whether the index + should be preserved in the result. + compute + For data types that support lazy evaluation, whether + the result should be computed before it is returned. + strict + Whether to strictly check for dimension matches + (if False, counts any dimensions with matching names as the same) + + Returns + ------- + values + NumPy array computed by evaluating the expression """ from ..element import Graph @@ -856,10 +887,10 @@ def __repr__(self): class df_dim(dim): - """ - A subclass of dim which provides access to the DataFrame namespace + """A subclass of dim which provides access to the DataFrame namespace along with tab-completion and type coercion allowing the expression to be applied on any columnar dataset. + """ namespace = 'dataframe' @@ -897,10 +928,10 @@ def loc(self): class xr_dim(dim): - """ - A subclass of dim which provides access to the xarray DataArray + """A subclass of dim which provides access to the xarray DataArray namespace along with tab-completion and type coercion allowing the expression to be applied on any gridded dataset. + """ namespace = 'xarray' @@ -934,32 +965,35 @@ def _coerce(self, dataset): def lon_lat_to_easting_northing(longitude, latitude): - """ - Projects the given longitude, latitude values into Web Mercator + """Projects the given longitude, latitude values into Web Mercator (aka Pseudo-Mercator or EPSG:3857) coordinates. Longitude and latitude can be provided as scalars, Pandas columns, or Numpy arrays, and will be returned in the same form. Lists or tuples will be converted to Numpy arrays. - Args: - longitude - latitude + Parameters + ---------- + longitude + latitude + + Returns + ------- + (easting, northing) - Returns: - (easting, northing) + Examples + -------- + >>> easting, northing = lon_lat_to_easting_northing(-74,40.71) - Examples: - easting, northing = lon_lat_to_easting_northing(-74,40.71) + >>> easting, northing = lon_lat_to_easting_northing( + np.array([-74]),np.array([40.71]) + ) - easting, northing = lon_lat_to_easting_northing( - np.array([-74]),np.array([40.71]) - ) + >>> df=pandas.DataFrame(dict(longitude=np.array([-74]),latitude=np.array([40.71]))) - df=pandas.DataFrame(dict(longitude=np.array([-74]),latitude=np.array([40.71]))) - df.loc[:, 'longitude'], df.loc[:, 'latitude'] = lon_lat_to_easting_northing( - df.longitude,df.latitude - ) + >>> df.loc[:, 'longitude'], df.loc[:, 'latitude'] = lon_lat_to_easting_northing( + df.longitude,df.latitude + ) """ if isinstance(longitude, (list, tuple)): longitude = np.array(longitude) @@ -976,19 +1010,20 @@ def lon_lat_to_easting_northing(longitude, latitude): def easting_northing_to_lon_lat(easting, northing): - """ - Projects the given easting, northing values into + """Projects the given easting, northing values into longitude, latitude coordinates. 
easting and northing values are assumed to be in Web Mercator (aka Pseudo-Mercator or EPSG:3857) coordinates. - Args: - easting - northing + Parameters + ---------- + easting + northing - Returns: - (longitude, latitude) + Returns + ------- + (longitude, latitude) """ if isinstance(easting, (list, tuple)): easting = np.array(easting) diff --git a/holoviews/util/warnings.py b/holoviews/util/warnings.py index 50a459eb8f..5e74885e27 100644 --- a/holoviews/util/warnings.py +++ b/holoviews/util/warnings.py @@ -22,11 +22,10 @@ def warn(message, category=None, stacklevel=None): def find_stack_level(): - """ - Find the first place in the stack that is not inside Holoviews and Param. + """Find the first place in the stack that is not inside Holoviews and Param. Inspired by: pandas.util._exceptions.find_stack_level - """ + """ import holoviews as hv pkg_dir = os.path.dirname(hv.__file__) @@ -78,10 +77,12 @@ def deprecated(remove_version, old, new=None, extra=None): class HoloviewsDeprecationWarning(DeprecationWarning): """A Holoviews-specific ``DeprecationWarning`` subclass. Used to selectively filter Holoviews deprecations for unconditional display. + """ class HoloviewsUserWarning(UserWarning): """A Holoviews-specific ``UserWarning`` subclass. Used to selectively filter Holoviews warnings for unconditional display. + """ diff --git a/pyproject.toml b/pyproject.toml index 682761e7e1..caf792a09e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -140,6 +140,7 @@ fix = true [tool.ruff.lint] select = [ "B", + "D", "E", "F", "FLY", @@ -171,6 +172,12 @@ ignore = [ "PLW2901", # `for` loop variable is overwritten "RUF012", # Mutable class attributes should use `typing.ClassVar` "RUF021", # parenthesize-chained-operators + "D1", # Undocumented code + "D200", # Unnecessary-multiline-docstring + "D205", # Missing-blank-line-after-summary + "D400", # Missing-trailing-period + "D401", # Non-imperative-mood + "D404", # Docstring-starts-with-this ] extend-unsafe-fixes = [ "F401", # Unused imports @@ -185,12 +192,16 @@ extend-unsafe-fixes = [ "RUF003", # Ambiguous unicode character "NPY002", # Replace legacy `np.random.rand` call with Generator "B904", # Within an `except` clause, raise exceptions with from err or None + "D", # pydocstyle ] [tool.ruff.lint.isort] known-first-party = ["holoviews"] combine-as-imports = true +[tool.ruff.lint.pydocstyle] +convention = "numpy" + [tool.codespell] ignore-words-list = "lod,nd,ndoes,reenabled,spreaded,whn,ser,assertIn,anc" skip = "doc/generate_modules.py"
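The dim expressions from the transform module above defer column-wise transforms until they are applied to a dataset. A brief sketch using made-up column names on a pandas-backed Dataset:

    import numpy as np
    import pandas as pd
    import holoviews as hv
    from holoviews import dim

    df = pd.DataFrame({'value': np.random.uniform(10, 60, 100),
                       'category': np.random.choice(['a', 'b', 'c'], 100)})
    ds = hv.Dataset(df, kdims=['category'], vdims=['value'])

    # Expressions are composed lazily and only evaluated when applied to a dataset.
    normed = (dim('value') - 10).norm()
    binned = dim('value').bin([10, 30, 60])
    coded = dim('category').categorize({'a': 0, 'b': 1, 'c': 2})

    print(normed.apply(ds)[:5])
    print(binned.apply(ds)[:5])
    print(coded.apply(ds)[:5])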