diff --git a/.buildinfo b/.buildinfo
new file mode 100644
index 00000000..9208a689
--- /dev/null
+++ b/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: d6ba2de41b45e5d2dc20749f85b988e1
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/.nojekyll b/.nojekyll
new file mode 100644
index 00000000..e69de29b
diff --git a/_autosummary/gaps.batch.BatchJob.html b/_autosummary/gaps.batch.BatchJob.html
new file mode 100644
index 00000000..d5181121
--- /dev/null
+++ b/_autosummary/gaps.batch.BatchJob.html
@@ -0,0 +1,711 @@

gaps.batch.BatchJob#

+
+
+class BatchJob(config)[source]#
+

Bases: object

+

Framework for building a batched job suite.

+

Based on reV-batch.

+

This framework allows users to modify key-value pairs in input configuration files based on a top-level batch config file. This framework will create run directories for all combinations of input parametrics and run the corresponding GAPs pipelines for each job.

+
+
Parameters:
+

config (str) – File path to the batch config JSON or CSV file.

+
+
+
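Example (a minimal sketch; the batch config path is hypothetical and must point to an existing batch config file):

    from gaps.batch import BatchJob

    batch = BatchJob("./config_batch.json")
    batch.run(dry_run=True)             # only create the sub job directories
    batch.run(monitor_background=True)  # kick off all pipelines and monitor in the background
    batch.cancel()                      # cancel all pipeline modules for all batch jobs
    batch.delete()                      # clear the batch sub job folders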

Methods

+ + + + + + + + + + + + +

cancel()

Cancel all pipeline modules for all batch jobs.

delete()

Clear all of the batch sub job folders.

run([dry_run, monitor_background])

Run the batch job from a config file.

+

Attributes

+ + + + + + + + + +

job_table

Batch job summary table.

sub_dirs

Job sub directory paths.

+
+
+property job_table#
+

Batch job summary table.

+
+
Type:
+

pd.DataFrame

+
+
+
+ +
+
+property sub_dirs#
+

Job sub directory paths.

+
+
Type:
+

list

+
+
+
+ +
+
+cancel()[source]#
+

Cancel all pipeline modules for all batch jobs.

+
+ +
+
+delete()[source]#
+

Clear all of the batch sub job folders.

+

Only the batch sub folders listed in the job summary csv file in the batch config directory are deleted.

+
+ +
+
+run(dry_run=False, monitor_background=False)[source]#
+

Run the batch job from a config file.

+
+
Parameters:
+
    +
  • dry_run (bool) – Flag to make job directories without running.

  • +
  • monitor_background (bool) – Flag to monitor all batch pipelines continuously in the background. Note that the stdout/stderr will not be captured, but you can set a pipeline “log_file” to capture logs.

  • +
+
+
+
+ +
+ +
diff --git a/_autosummary/gaps.batch.BatchSet.html b/_autosummary/gaps.batch.BatchSet.html
new file mode 100644
index 00000000..52af8ef5
--- /dev/null
+++ b/_autosummary/gaps.batch.BatchSet.html
@@ -0,0 +1,693 @@

gaps.batch.BatchSet#

+
+
+class BatchSet(arg_combo, file_set, tag)#
+

Bases: tuple

+

Create new instance of BatchSet(arg_combo, file_set, tag)

+

Methods

+ + + + + + + + + +

count(value, /)

Return number of occurrences of value.

index(value[, start, stop])

Return first index of value.

+

Attributes

+ + + + + + + + + + + + +

arg_combo

Alias for field number 0

file_set

Alias for field number 1

tag

Alias for field number 2

+
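Example (illustrative values only; BatchSet is a plain namedtuple of the three fields listed above):

    from gaps.batch import BatchSet

    bset = BatchSet(arg_combo={"resolution": 90}, file_set={"config_run.json"}, tag="_0")
    assert bset.arg_combo["resolution"] == 90
    assert bset.tag == "_0"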
+
+__add__(value, /)#
+

Return self+value.

+
+ +
+
+__mul__(value, /)#
+

Return self*value.

+
+ +
+
+arg_combo#
+

Alias for field number 0

+
+ +
+
+count(value, /)#
+

Return number of occurrences of value.

+
+ +
+
+file_set#
+

Alias for field number 1

+
+ +
+
+index(value, start=0, stop=9223372036854775807, /)#
+

Return first index of value.

+

Raises ValueError if the value is not present.

+
+ +
+
+tag#
+

Alias for field number 2

+
+ +
diff --git a/_autosummary/gaps.batch.html b/_autosummary/gaps.batch.html
new file mode 100644
index 00000000..186899b2
--- /dev/null
+++ b/_autosummary/gaps.batch.html
@@ -0,0 +1,601 @@

gaps.batch#

+

GAPs batching framework for parametric runs.

+

Based on reV-batch.

+

Classes

+ + + + + + + + + +

BatchJob(config)

Framework for building a batched job suite.

BatchSet(arg_combo, file_set, tag)

Create new instance of BatchSet(arg_combo, file_set, tag)

+
diff --git a/_autosummary/gaps.cli.batch.batch_command.html b/_autosummary/gaps.cli.batch.batch_command.html
new file mode 100644
index 00000000..9dfd4555
--- /dev/null
+++ b/_autosummary/gaps.cli.batch.batch_command.html
@@ -0,0 +1,620 @@

gaps.cli.batch.batch_command#

+
+
+batch_command()[source]#
+

Generate a batch command.

+
+ +
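Example (a hedged sketch; batch_command() is assumed to return a click command object that a package could attach to an existing click group):

    from gaps.cli.batch import batch_command

    cli.add_command(batch_command())  # `cli` is an existing click.Group (hypothetical)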
diff --git a/_autosummary/gaps.cli.batch.html b/_autosummary/gaps.cli.batch.html
new file mode 100644
index 00000000..ffb7135b
--- /dev/null
+++ b/_autosummary/gaps.cli.batch.html
@@ -0,0 +1,600 @@

gaps.cli.batch#

+

Batch Job CLI entry points.

+

Functions

+ + + + + + +

batch_command()

Generate a batch command.

+
diff --git a/_autosummary/gaps.cli.cli.as_click_command.html b/_autosummary/gaps.cli.cli.as_click_command.html
new file mode 100644
index 00000000..d5e92108
--- /dev/null
+++ b/_autosummary/gaps.cli.cli.as_click_command.html
@@ -0,0 +1,631 @@

gaps.cli.cli.as_click_command#

+as_click_command(command_config)[source]#

Convert a GAPs command configuration object into a click command.
diff --git a/_autosummary/gaps.cli.cli.html b/_autosummary/gaps.cli.cli.html
new file mode 100644
index 00000000..dac58206
--- /dev/null
+++ b/_autosummary/gaps.cli.cli.html
@@ -0,0 +1,603 @@

gaps.cli.cli#

+

Generation CLI entry points.

+

Functions

+ + + + + + + + + +

as_click_command(command_config)

Convert a GAPs command configuration object into a click command.

make_cli(commands[, info])

Create a pipeline CLI to split execution across HPC nodes.

+
diff --git a/_autosummary/gaps.cli.cli.make_cli.html b/_autosummary/gaps.cli.cli.make_cli.html
new file mode 100644
index 00000000..fcc25172
--- /dev/null
+++ b/_autosummary/gaps.cli.cli.make_cli.html
@@ -0,0 +1,656 @@

gaps.cli.cli.make_cli#

+
+
+make_cli(commands, info=None)[source]#
+

Create a pipeline CLI to split execution across HPC nodes.

+

This function generates a CLI for a package based on the input command configurations. Each command configuration is based around a function that is to be executed on one or more nodes.

+
+
Parameters:
+
    +
  • commands (list of command configurations) – List of command configs to convert to click commands. See the CLICommandFromClass or CLICommandFromFunction documentation for a description of the input options. Each command configuration is converted into a subcommand. Any command configuration with project_points in the split_keys argument will get a corresponding collect-{command name} command that collects the outputs of the spatially-distributed command.

  • +
  • info (dict, optional) – A dictionary that contains optional info about the calling program to include in the CLI. Allowed keys include:

    +
    +
    +
name : str

Name of program to display at the top of the CLI help. This input is optional, but specifying it yields richer documentation for the main CLI command.

    +
    +
version : str

    Include program version with a --version CLI option.

    +
    +
    +
    +

    By default, None.

    +
  • +
+
+
Returns:
+

click.Group – A group containing the requested subcommands. This can be used as the entrypoint to the package CLI.
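Example (a minimal sketch of a package entry point; my_run_func is a hypothetical, well-documented node function and the info values are illustrative):

    # cli.py of a hypothetical package
    from gaps.cli.cli import make_cli
    from gaps.cli.command import CLICommandFromFunction

    commands = [
        CLICommandFromFunction(
            my_run_func,
            name="run",
            add_collect=True,
            split_keys={"project_points"},
        )
    ]
    cli = make_cli(commands, info={"name": "MyTool", "version": "0.1.0"})

    if __name__ == "__main__":
        cli(obj={})  # the returned click.Group is the CLI entry point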

diff --git a/_autosummary/gaps.cli.collect.collect.html b/_autosummary/gaps.cli.collect.collect.html
new file mode 100644
index 00000000..234510ae
--- /dev/null
+++ b/_autosummary/gaps.cli.collect.collect.html
@@ -0,0 +1,649 @@

gaps.cli.collect.collect#

+
+
+collect(_out_path, _pattern, project_points=None, datasets=None, purge_chunks=False)[source]#
+

Run collection on local worker.

+

Collect data generated across multiple nodes into a single HDF5 file.

+
+
Parameters:
+
    +
  • project_points (str | list, optional) – This input should represent the project points that correspond to the full collection of points contained in the input HDF5 files to be collected. You may simply point to a ProjectPoints csv file that contains the GID’s that should be collected. You may also input the GID’s as a list, though this may not be suitable for collections with a large number of points. You may also set this input to None to generate a list of GID’s automatically from the input files. By default, None.

  • +
  • datasets (list of str, optional) – List of dataset names to collect into the output file. If collection is performed into multiple files (i.e. multiple input patterns), this list can contain all relevant datasets across all files (a warning will be thrown, but it is safe to ignore it). If None, all datasets from the input files are collected. By default, None.

  • +
  • purge_chunks (bool, optional) – Option to delete single-node input HDF5 files. Note that the input files will not be removed if any of the datasets they contain have not been collected, regardless of the value of this input. By default, False.

  • +
+
+
Returns:
+

str – Path to HDF5 file with the collected outputs.
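Example (a hedged sketch of calling the collection function directly; paths, pattern, and dataset name are hypothetical):

    from gaps.cli.collect import collect

    out_file = collect(
        "./outputs/project.h5",      # _out_path: destination HDF5 file
        "./outputs/project_*.h5",    # _pattern: glob matching the per-node output files
        datasets=["cf_mean"],        # optional subset of datasets to collect
        purge_chunks=False,          # keep the single-node input files
    )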

+
+
+
+ +
diff --git a/_autosummary/gaps.cli.collect.html b/_autosummary/gaps.cli.collect.html
new file mode 100644
index 00000000..e69b3c31
--- /dev/null
+++ b/_autosummary/gaps.cli.collect.html
@@ -0,0 +1,600 @@

gaps.cli.collect#

+

GAPs collection CLI entry points.

+

Functions

+ + + + + + +

collect(_out_path, _pattern[, ...])

Run collection on local worker.

+
+ + +
diff --git a/_autosummary/gaps.cli.command.AbstractBaseCLICommandConfiguration.html b/_autosummary/gaps.cli.command.AbstractBaseCLICommandConfiguration.html
new file mode 100644
index 00000000..6e64adcd
--- /dev/null
+++ b/_autosummary/gaps.cli.command.AbstractBaseCLICommandConfiguration.html
@@ -0,0 +1,668 @@

gaps.cli.command.AbstractBaseCLICommandConfiguration#

+
+
+class AbstractBaseCLICommandConfiguration(name, add_collect=False, split_keys=None, config_preprocessor=None, skip_doc_params=None)[source]#
+

Bases: ABC

+

Abstract Base CLI Command representation.

+

This base implementation provides helper methods to determine whether a given command is split spatially.

+

Note that runner is a required part of the interface but is not listed as an abstract property to avoid unnecessary function wrapping.

+

Methods

+ + + +
+

Attributes

+ + + + + + + + + +

documentation

Documentation object.

is_split_spatially

True if execution is split spatially across nodes.

+
+
+property is_split_spatially#
+

True if execution is split spatially across nodes.

+
+
Type:
+

bool

+
+
+
+ +
+
+abstract property documentation#
+

Documentation object.

+
+
Type:
+

CommandDocumentation

+
+
+
+ +
+ +
diff --git a/_autosummary/gaps.cli.command.CLICommandConfiguration.html b/_autosummary/gaps.cli.command.CLICommandConfiguration.html
new file mode 100644
index 00000000..7b82d0fe
--- /dev/null
+++ b/_autosummary/gaps.cli.command.CLICommandConfiguration.html
@@ -0,0 +1,621 @@

gaps.cli.command.CLICommandConfiguration#

+
+
+CLICommandConfiguration(name, function, split_keys=None, config_preprocessor=None)[source]#
+

This class is deprecated.

+

Please use CLICommandFromFunction instead.

+
+ +
diff --git a/_autosummary/gaps.cli.command.CLICommandFromClass.html b/_autosummary/gaps.cli.command.CLICommandFromClass.html
new file mode 100644
index 00000000..2b9dbd0a
--- /dev/null
+++ b/_autosummary/gaps.cli.command.CLICommandFromClass.html
@@ -0,0 +1,908 @@

gaps.cli.command.CLICommandFromClass#

+
+
+class CLICommandFromClass(init, method, name=None, add_collect=False, split_keys=None, config_preprocessor=None, skip_doc_params=None)[source]#
+

Bases: AbstractBaseCLICommandConfiguration

+

Configure a CLI command to execute an object method on multiple nodes.

+

This class configures a CLI command that initializes a given object and runs a particular method of that object across multiple nodes on an HPC. The primary utility is to split the method execution spatially, meaning that individual nodes will run the method on a subset of the input points. However, this configuration also supports splitting the execution on other inputs (in lieu of or in addition to the geospatial partitioning).

+
+
Parameters:
+
    +
  • init (class) – The class to be initialized and used to run on individual nodes. The class must implement method. The initializer, along with the corresponding method, will be used to generate all the documentation and template configuration files, so it should be thoroughly documented (using a NumPy Style Python Docstring). In particular, the “Extended Summary” (from the __init__ method only) and the “Parameters” section (from both the __init__ method and the run method given below) will be pulled from the docstring.

    +
    +

    Warning

    +

The “Extended Summary” section may not display properly if the short “Summary” section in the __init__ method is missing.

    +
    +
  • +
  • method (str) – The name of a method of the init class to act as the model function to run across multiple nodes on the HPC. This method must return the path to the output file it generates (or a list of paths if multiple output files are generated). If no output files are generated, the method must return None or an empty list. In order to avoid clashing output file names with jobs on other nodes, make sure to “request” the tag argument. This method can “request” the following arguments by including them in the method signature (gaps will automatically pass them to the method without any additional user input):

    +
    +
    +
    tagstr

    Short string unique to this job run that can be used +to generate unique output filenames, thereby +avoiding clashing output files with jobs on other +nodes. This string contains a leading underscore, +so the file name can easily be generated: +f"{out_file_name}{tag}.{extension}".

    +
    +
    command_namestr

    Name of the command being run. This is equivalent to +the name input argument.

    +
    +
    config_filestr

    Path to the configuration file specified by the +user.

    +
    +
    project_dirstr

    Path to the project directory (parent directory of +the configuration file).

    +
    +
    job_namestr

    Name of the job being run. This is typically a +combination of the project directory, the command +name, and a tag unique to a job. Note that the tag +will not be included if you request this argument +in a config preprocessing function, as the execution +has not been split into multiple jobs by that point.

    +
    +
    out_dirstr

    Path to output directory - typically equivalent to +the project directory.

    +
    +
    out_fpathstr

    Suggested path to output file. You are not required +to use this argument - it is provided purely for +convenience purposes. This argument combines the +out_dir with job_name to yield a unique +output filepath for a given node. Note that the +output filename will contain the tag. Also note that +this string WILL NOT contain a file-ending, so +that will have to be added by the node function.

    +
    +
    +
    +

    If your function is capable of multiprocessing, you should +also include max_workers in the function signature. +gaps will pass an integer equal to the number of +processes the user wants to run on a single node for this +value.

    +
    +

    Warning

    +

    The keywords {"max-workers", +"sites_per_worker", "memory_utilization_limit", +"timeout", "pool_size"} are assumed to +describe execution control. If you request any +of these as function arguments, users of your +CLI will specify them in the +execution_control block of the input config +file.

    +
    +

    Note that the config parameter is not allowed as +a function signature item. Please request all the required +keys/inputs directly instead. This function can also request +“private” arguments by including a leading underscore in the +argument name. These arguments are NOT exposed to users in +the documentation or template configuration files. Instead, +it is expected that the config_preprocessor function +fills these arguments in programmatically before the +function is distributed across nodes. See the implementation +of gaps.cli.collect.collect() and +gaps.cli.preprocessing.preprocess_collect_config() +for an example of this pattern. You can use the +skip_doc_params input below to achieve the same results +without the underscore syntax (helpful for public-facing +functions).

    +
  • +
  • name (str, optional) – Name of the command. This will be the name used to call the +command on the terminal. This name does not have to match +the function name. It is encouraged to use lowercase names +with dashes (“-”) instead of underscores (“_”) to stay +consistent with click’s naming conventions. By default, +None, which uses the method name (with minor +formatting to conform to click-style commands).

  • +
  • add_collect (bool, optional) – Option to add a “collect-{command_name}” command immediately +following this command to collect the (multiple) output +files generated across nodes into a single file. The collect +command will only work if the output files the previous +command generates are HDF5 files with meta and +time_index datasets (standard rex HDF5 file +structure). If you set this option to True, your run +function must return the path (as a string) to the output +file it generates in order for users to be able to use +"PIPELINE" as the input to the collect_pattern key +in the collection config. The path returned by this function +must also include the tag in the output file name in +order for collection to function properly (an easy way to do +this is to request tag in the function signature and +name the output file generated by the function using the +f"{out_file_name}{tag}.{extension}" format). +By default, False.

  • +
  • split_keys (set | container, optional) – A set of strings identifying the names of the config keys +that gaps should split the function execution on. To +specify geospatial partitioning in particular, ensure that +the main function has a “project_points” argument (which +accepts a gaps.project_points.ProjectPoints +instance) and specify “project_points” as a split argument. +Users of the CLI will only need to specify the path to the +project points file and a “nodes” argument in the execution +control. To split execution on additional/other inputs, +include them by name in this input (and ensure the run +function accepts them as input). You may include tuples of +strings in this iterable as well. Tuples of strings will be +interpreted as combinations of keys whose values should be +iterated over simultaneously. For example, specifying +split_keys=[("a", "b")] and invoking with a config file +where a = [1, 2] and b = [3, 4] will run the main +function two times (on two nodes), first with the inputs +a=1, b=3 and then with the inputs a=2, b=4. +It is the responsibility of the developer using this class +to ensure that the user input for all split_keys is an +iterable (typically a list), and that the lengths of all +“paired” keys match. To allow non-iterable user input for +split keys, use the config_preprocessor argument to +specify a preprocessing function that converts the user +input into a list of the expected inputs. If users specify +an empty list or None for a key in split_keys, then +GAPs will pass None as the value for that key (i.e. if +split_keys=["a"] and users specify "a": [] in their +config, then the function will be called with +a=None). If None, execution is not split across +nodes, and a single node is always used for the function +call. By default, None.

  • +
  • config_preprocessor (callable, optional) – Optional function for configuration pre-processing. The preprocessing step occurs before jobs are split across HPC nodes, and can therefore be used to calculate the split_keys input and/or validate that it conforms to the requirements laid out above. At minimum, this function should have “config” as the first parameter (which will receive the user configuration input as a dictionary) and must return the updated config dictionary. This function can also “request” the following arguments by including them in the function signature:

    +
    +
    +
    command_namestr

    Name of the command being run. This is equivalent to +the name input above.

    +
    +
    config_filePath

    Path to the configuration file specified by the +user.

    +
    +
    project_dirPath

    Path to the project directory (parent directory of +the configuration file).

    +
    +
    job_namestr

    Name of the job being run. This is typically a +combination of the project directory and the command +name.

    +
    +
    out_dirPath

    Path to output directory - typically equivalent to +the project directory.

    +
    +
    out_fpathPath

    Suggested path to output file. You are not required +to use this argument - it is provided purely for +convenience purposes. This argument combines the +out_dir with job_name to yield a unique +output filepath for a given node. Note that the +output filename WILL NOT contain the tag, since +the number of split nodes have not been determined +when the config pre-processing function is called. +Also note that this string WILL NOT contain a +file-ending, so that will have to be added by the +node function.

    +
    +
    log_directoryPath

    Path to log output directory (defaults to +project_dir / “logs”).

    +
    +
    verbosebool

Flag indicating whether the user has selected a DEBUG verbosity level for logs.

    +
    +
    +
    +

These inputs will be provided by GAPs and will not be displayed to users in the template configuration files or documentation. See gaps.cli.preprocessing.preprocess_collect_config() for an example. Note that the tag parameter is not allowed as a pre-processing function signature item (the node jobs will not have been configured before this function executes). This function can also “request” new user inputs that are not present in the signature of the main run function. In this case, the documentation for these new arguments is pulled from the config_preprocessor function. This feature can be used to request auxiliary information from the user to fill in “private” inputs to the main run function. See the implementation of gaps.cli.preprocessing.preprocess_collect_config() and gaps.cli.collect.collect() for an example of this pattern. Do not request parameters with the same names as any of your model function parameters (i.e. if res_file is a model parameter, do not request it in the preprocessing function docstring - extract it from the config dictionary instead). By default, None.

    +
  • +
  • skip_doc_params (iterable of str, optional) – Optional iterable of parameter names that should be excluded +from the documentation/template configuration files. This +can be useful if your pre-processing function automatically +sets some parameters based on other user input. This option +is an alternative to the “private” arguments discussed in +the function parameter documentation above. By default, +None.

  • +
+
+
+
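Example (a minimal sketch; MyModel is a hypothetical, well-documented class whose run method returns the path to the output file it writes):

    from gaps.cli.cli import make_cli
    from gaps.cli.command import CLICommandFromClass

    analyze = CLICommandFromClass(
        MyModel,                        # class with a documented __init__
        "run",                          # method executed on each node
        name="analyze",
        add_collect=True,
        split_keys={"project_points"},
    )
    cli = make_cli([analyze], info={"name": "MyTool", "version": "0.1.0"})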

Methods

+ + + +
+

Attributes

+ + + + + + + + + +

documentation

Documentation object.

is_split_spatially

True if execution is split spatially across nodes.

+
+
+property documentation#
+

Documentation object.

+
+
Type:
+

CommandDocumentation

+
+
+
+ +
+
+property is_split_spatially#
+

True if execution is split spatially across nodes.

+
+
Type:
+

bool

+
+
+
+ +
+ +
+ + +
diff --git a/_autosummary/gaps.cli.command.CLICommandFromFunction.html b/_autosummary/gaps.cli.command.CLICommandFromFunction.html
new file mode 100644
index 00000000..c936f213
--- /dev/null
+++ b/_autosummary/gaps.cli.command.CLICommandFromFunction.html
@@ -0,0 +1,901 @@

gaps.cli.command.CLICommandFromFunction#

+
+
+class CLICommandFromFunction(function, name=None, add_collect=False, split_keys=None, config_preprocessor=None, skip_doc_params=None)[source]#
+

Bases: AbstractBaseCLICommandConfiguration

+

Configure a CLI command to execute a function on multiple nodes.

+

This class configures a CLI command that runs a given function across multiple nodes on an HPC. The primary utility is to split the function execution spatially, meaning that individual nodes will run the function on a subset of the input points. However, this configuration also supports splitting the execution on other inputs (in lieu of or in addition to the geospatial partitioning).

+
+
Parameters:
+
    +
  • function (callable) – The function to run on individual nodes. This function will be used to generate all the documentation and template configuration files, so it should be thoroughly documented (using a NumPy Style Python Docstring). In particular, the “Extended Summary” and the “Parameters” section will be pulled from the docstring.

    +
    +

    Warning

    +

The “Extended Summary” section may not show up properly if the short “Summary” section is missing.

    +
    +

This function must return the path to the output file it generates (or a list of paths if multiple output files are generated). If no output files are generated, the function must return None or an empty list. In order to avoid clashing output file names with jobs on other nodes, make sure to “request” the tag argument. This function can “request” the following arguments by including them in the function signature (gaps will automatically pass them to the function without any additional user input):

    +
    +
    +
    tagstr

    Short string unique to this job run that can be used +to generate unique output filenames, thereby +avoiding clashing output files with jobs on other +nodes. This string contains a leading underscore, +so the file name can easily be generated: +f"{out_file_name}{tag}.{extension}".

    +
    +
    command_namestr

    Name of the command being run. This is equivalent to +the name input argument.

    +
    +
    config_filestr

    Path to the configuration file specified by the +user.

    +
    +
    project_dirstr

    Path to the project directory (parent directory of +the configuration file).

    +
    +
    job_namestr

    Name of the job being run. This is typically a +combination of the project directory, the command +name, and a tag unique to a job. Note that the tag +will not be included if you request this argument +in a config preprocessing function, as the execution +has not been split into multiple jobs by that point.

    +
    +
    out_dirstr

    Path to output directory - typically equivalent to +the project directory.

    +
    +
    out_fpathstr

    Suggested path to output file. You are not required +to use this argument - it is provided purely for +convenience purposes. This argument combines the +out_dir with job_name to yield a unique +output filepath for a given node. Note that the +output filename will contain the tag. Also note that +this string WILL NOT contain a file-ending, so +that will have to be added by the node function.

    +
    +
    +
    +

    If your function is capable of multiprocessing, you should +also include max_workers in the function signature. +gaps will pass an integer equal to the number of +processes the user wants to run on a single node for this +value.

    +
    +

    Warning

    +

    The keywords {"max-workers", +"sites_per_worker", "memory_utilization_limit", +"timeout", "pool_size"} are assumed to +describe execution control. If you request any +of these as function arguments, users of your +CLI will specify them in the +execution_control block of the input config +file.

    +
    +

    Note that the config parameter is not allowed as +a function signature item. Please request all the required +keys/inputs directly instead. This function can also request +“private” arguments by including a leading underscore in the +argument name. These arguments are NOT exposed to users in +the documentation or template configuration files. Instead, +it is expected that the config_preprocessor function +fills these arguments in programmatically before the +function is distributed across nodes. See the implementation +of gaps.cli.collect.collect() and +gaps.cli.preprocessing.preprocess_collect_config() +for an example of this pattern. You can use the +skip_doc_params input below to achieve the same results +without the underscore syntax (helpful for public-facing +functions).

    +
  • +
  • name (str, optional) – Name of the command. This will be the name used to call the +command on the terminal. This name does not have to match +the function name. It is encouraged to use lowercase names +with dashes (“-”) instead of underscores (“_”) to stay +consistent with click’s naming conventions. By default, +None, which uses the function name as the command name +(with minor formatting to conform to click-style +commands).

  • +
  • add_collect (bool, optional) – Option to add a “collect-{command_name}” command immediately +following this command to collect the (multiple) output +files generated across nodes into a single file. The collect +command will only work if the output files the previous +command generates are HDF5 files with meta and +time_index datasets (standard rex HDF5 file +structure). If you set this option to True, your run +function must return the path (as a string) to the output +file it generates in order for users to be able to use +"PIPELINE" as the input to the collect_pattern key +in the collection config. The path returned by this function +must also include the tag in the output file name in +order for collection to function properly (an easy way to do +this is to request tag in the function signature and +name the output file generated by the function using the +f"{out_file_name}{tag}.{extension}" format). +By default, False.

  • +
  • split_keys (set | container, optional) – A set of strings identifying the names of the config keys +that gaps should split the function execution on. To +specify geospatial partitioning in particular, ensure that +the main function has a “project_points” argument (which +accepts a gaps.project_points.ProjectPoints +instance) and specify “project_points” as a split argument. +Users of the CLI will only need to specify the path to the +project points file and a “nodes” argument in the execution +control. To split execution on additional/other inputs, +include them by name in this input (and ensure the run +function accepts them as input). You may include tuples of +strings in this iterable as well. Tuples of strings will be +interpreted as combinations of keys whose values should be +iterated over simultaneously. For example, specifying +split_keys=[("a", "b")] and invoking with a config file +where a = [1, 2] and b = [3, 4] will run the main +function two times (on two nodes), first with the inputs +a=1, b=3 and then with the inputs a=2, b=4. +It is the responsibility of the developer using this class +to ensure that the user input for all split_keys is an +iterable (typically a list), and that the lengths of all +“paired” keys match. To allow non-iterable user input for +split keys, use the config_preprocessor argument to +specify a preprocessing function that converts the user +input into a list of the expected inputs. If users specify +an empty list or None for a key in split_keys, then +GAPs will pass None as the value for that key (i.e. if +split_keys=["a"] and users specify "a": [] in their +config, then the function will be called with +a=None). If None, execution is not split across +nodes, and a single node is always used for the function +call. By default, None.

  • +
  • config_preprocessor (callable, optional) – Optional function for configuration pre-processing. The preprocessing step occurs before jobs are split across HPC nodes, and can therefore be used to calculate the split_keys input and/or validate that it conforms to the requirements laid out above. At minimum, this function should have “config” as the first parameter (which will receive the user configuration input as a dictionary) and must return the updated config dictionary. This function can also “request” the following arguments by including them in the function signature:

    +
    +
    +
    command_namestr

    Name of the command being run. This is equivalent to +the name input above.

    +
    +
    config_filePath

    Path to the configuration file specified by the +user.

    +
    +
    project_dirPath

    Path to the project directory (parent directory of +the configuration file).

    +
    +
    job_namestr

    Name of the job being run. This is typically a +combination of the project directory and the command +name.

    +
    +
    out_dirPath

    Path to output directory - typically equivalent to +the project directory.

    +
    +
    out_fpathPath

    Suggested path to output file. You are not required +to use this argument - it is provided purely for +convenience purposes. This argument combines the +out_dir with job_name to yield a unique +output filepath for a given node. Note that the +output filename WILL NOT contain the tag, since +the number of split nodes have not been determined +when the config pre-processing function is called. +Also note that this string WILL NOT contain a +file-ending, so that will have to be added by the +node function.

    +
    +
    log_directoryPath

    Path to log output directory (defaults to +project_dir / “logs”).

    +
    +
    verbosebool

Flag indicating whether the user has selected a DEBUG verbosity level for logs.

    +
    +
    +
    +

These inputs will be provided by GAPs and will not be displayed to users in the template configuration files or documentation. See gaps.cli.preprocessing.preprocess_collect_config() for an example. Note that the tag parameter is not allowed as a pre-processing function signature item (the node jobs will not have been configured before this function executes). This function can also “request” new user inputs that are not present in the signature of the main run function. In this case, the documentation for these new arguments is pulled from the config_preprocessor function. This feature can be used to request auxiliary information from the user to fill in “private” inputs to the main run function. See the implementation of gaps.cli.preprocessing.preprocess_collect_config() and gaps.cli.collect.collect() for an example of this pattern. Do not request parameters with the same names as any of your model function parameters (i.e. if res_file is a model parameter, do not request it in the preprocessing function docstring - extract it from the config dictionary instead). By default, None.

    +
  • +
  • skip_doc_params (iterable of str, optional) – Optional iterable of parameter names that should be excluded +from the documentation/template configuration files. This +can be useful if your pre-processing function automatically +sets some parameters based on other user input. This option +is an alternative to the “private” arguments discussed in +the function parameter documentation above. By default, +None.

  • +
+
+
+
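Example (a minimal sketch; run_model is a hypothetical node function that follows the requirements above and writes one HDF5 file per node):

    from gaps.cli.command import CLICommandFromFunction

    def run_model(project_points, out_dir, tag, max_workers=1):
        """Run the model on a subset of points and return the output file path."""
        out_fpath = f"{out_dir}/model{tag}.h5"
        # ... compute results for `project_points` and write them to out_fpath ...
        return out_fpath

    run_cmd = CLICommandFromFunction(
        run_model,
        name="run-model",
        add_collect=True,
        split_keys={"project_points"},
    )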

Methods

+ + + +
+

Attributes

+ + + + + + + + + +

documentation

Documentation object.

is_split_spatially

True if execution is split spatially across nodes.

+
+
+property documentation#
+

Documentation object.

+
+
Type:
+

CommandDocumentation

+
+
+
+ +
+
+property is_split_spatially#
+

True if execution is split spatially across nodes.

+
+
Type:
+

bool

+
+
+
+ +
+ +
+ + +
diff --git a/_autosummary/gaps.cli.command.html b/_autosummary/gaps.cli.command.html
new file mode 100644
index 00000000..04ed5be7
--- /dev/null
+++ b/_autosummary/gaps.cli.command.html
@@ -0,0 +1,614 @@

gaps.cli.command#

+

GAPs command configuration preprocessing functions.

+

Functions

+ + + + + + +

CLICommandConfiguration(name, function[, ...])

This class is deprecated.

+

Classes

+ + + + + + + + + + + + +

AbstractBaseCLICommandConfiguration(name[, ...])

Abstract Base CLI Command representation.

CLICommandFromClass(init, method[, name, ...])

Configure a CLI command to execute an object method on multiple nodes.

CLICommandFromFunction(function[, name, ...])

Configure a CLI command to execute a function on multiple nodes.

+
+ + +
diff --git a/_autosummary/gaps.cli.config.as_script_str.html b/_autosummary/gaps.cli.config.as_script_str.html
new file mode 100644
index 00000000..ed2a1d1d
--- /dev/null
+++ b/_autosummary/gaps.cli.config.as_script_str.html
@@ -0,0 +1,641 @@

gaps.cli.config.as_script_str#

+
+
+as_script_str(input_)[source]#
+

Convert input to how it would appear in a python script.

+

Essentially this means the input is dumped to json format with some minor replacements (e.g. null -> None, etc.). Importantly, all string inputs are wrapped in double quotes.

+
+
Parameters:
+

input_ (obj) – Any object that can be serialized with json.dumps().

+
+
Returns:
+

str – Input object as it would appear in a python script.

+
+
+

Examples

+
>>> as_script_str("a")
"a"

>>> as_script_str({"a": None, "b": True, "c": False, "d": 3,
...                "e": [{"t": "hi"}]})
{"a": None, "b": True, "c": False, "d": 3, "e": [{"t": "hi"}]}
+
+
+
+ +
+ + +
diff --git a/_autosummary/gaps.cli.config.from_config.html b/_autosummary/gaps.cli.config.from_config.html
new file mode 100644
index 00000000..e9a1d82e
--- /dev/null
+++ b/_autosummary/gaps.cli.config.from_config.html
@@ -0,0 +1,620 @@

gaps.cli.config.from_config#

+
+
+from_config(ctx, config_file, command_config)[source]#
+

Run command from a config file.

+
+ +
+ + +
diff --git a/_autosummary/gaps.cli.config.html b/_autosummary/gaps.cli.config.html
new file mode 100644
index 00000000..a5e138d3
--- /dev/null
+++ b/_autosummary/gaps.cli.config.html
@@ -0,0 +1,609 @@

gaps.cli.config#

+

GAPs CLI command for spatially distributed function runs.

+

Functions

+ + + + + + + + + + + + + + + +

as_script_str(input_)

Convert input to how it would appear in a python script.

from_config(ctx, config_file, command_config)

Run command from a config file.

node_kwargs(run_func, config)

Compile the function input arguments for a particular node.

run_with_status_updates(run_func, config, ...)

Run a function and write status updates before/after execution.

+
+ + +
diff --git a/_autosummary/gaps.cli.config.node_kwargs.html b/_autosummary/gaps.cli.config.node_kwargs.html
new file mode 100644
index 00000000..2e94dcbc
--- /dev/null
+++ b/_autosummary/gaps.cli.config.node_kwargs.html
@@ -0,0 +1,648 @@

gaps.cli.config.node_kwargs#

+
+
+node_kwargs(run_func, config)[source]#
+

Compile the function input arguments for a particular node.

+
+
Parameters:
+
    +
  • run_func (callable) – A function to run.

  • +
  • config (dict) – Dictionary of node-specific inputs to run_func.

  • +
  • logging_options (dict) – Dictionary of logging options containing at least the following +key-value pairs:

    +
    +
    +
name : str

    Job name; name of log file.

    +
    +
log_directory : path-like

    Path to log file directory.

    +
    +
verbose : bool

    Option to turn on debug logging.

    +
    +
node : bool

    Flag for whether this is a node-level logger. If this is +a node logger, and the log level is info, the log_file +will be None (sent to stdout).

    +
    +
    +
    +
  • +
+
+
Returns:
+

dict – Function run kwargs to be used on this node.

+
+
+
+ +
+ + +
diff --git a/_autosummary/gaps.cli.config.run_with_status_updates.html b/_autosummary/gaps.cli.config.run_with_status_updates.html
new file mode 100644
index 00000000..d7195581
--- /dev/null
+++ b/_autosummary/gaps.cli.config.run_with_status_updates.html
@@ -0,0 +1,650 @@

gaps.cli.config.run_with_status_updates#

+
+
+run_with_status_updates(run_func, config, logging_options, status_update_args, exclude)[source]#
+

Run a function and write status updates before/after execution.

+
+
Parameters:
+
    +
  • run_func (callable) – A function to run.

  • +
  • config (dict) – Dictionary of node-specific inputs to run_func.

  • +
  • logging_options (dict) – Dictionary of logging options containing at least the following +key-value pairs:

    +
    +
    +
name : str

    Job name; name of log file.

    +
    +
log_directory : path-like

    Path to log file directory.

    +
    +
verbose : bool

    Option to turn on debug logging.

    +
    +
node : bool

    Flag for whether this is a node-level logger. If this is +a node logger, and the log level is info, the log_file +will be None (sent to stdout).

    +
    +
    +
    +
  • +
  • status_update_args (iterable) – An iterable containing the first three initializer arguments for +StatusUpdates.

  • +
  • exclude (collection | None) – A collection (list, set, dict, etc.) of keys that should be +excluded from the job status file that is written before/after +the function runs.

  • +
+
+
+
+ +
+ + +
diff --git a/_autosummary/gaps.cli.documentation.CommandDocumentation.html b/_autosummary/gaps.cli.documentation.CommandDocumentation.html
new file mode 100644
index 00000000..3925fe87
--- /dev/null
+++ b/_autosummary/gaps.cli.documentation.CommandDocumentation.html
@@ -0,0 +1,802 @@

gaps.cli.documentation.CommandDocumentation#

+
+
+class CommandDocumentation(*functions, skip_params=None, is_split_spatially=False)[source]#
+

Bases: object

+

Generate documentation for a command.

+

Commands are typically composed of one or more functions, including class initializers and object methods. Documentation is compiled from all input functions and used to generate CLI help docs and template configuration files.

+
+
Parameters:
+
    +
  • *functions (callables) – Functions that comprise a single command for which to generate documentation. IMPORTANT: the extended summary will be pulled from the first function only!

  • +
  • skip_params (set, optional) – Set of parameter names (str) to exclude from documentation. Typically this is because the user would not explicitly have to specify these. By default, None.

  • +
  • is_split_spatially (bool, optional) – Flag indicating whether or not this function is split spatially across nodes. If True, a “nodes” option is added to the execution control block of the generated documentation. By default, False.

  • +
+
+
+
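Example (a minimal sketch; my_run_func is a hypothetical, documented function):

    from gaps.cli.documentation import CommandDocumentation

    doc = CommandDocumentation(my_run_func, skip_params={"tag"}, is_split_spatially=True)
    template = doc.template_config       # dict template for the user config file
    help_text = doc.command_help("run")  # CLI help string for a command named "run"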

Methods

+ + + + + + + + + + + + +

command_help(command_name)

Generate a help string for a command.

config_help(command_name)

Generate a config help string for a command.

param_required(param)

Check whether a parameter is a required input for the run function.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

REQUIRED_TAG

default_exec_values

Default "execution_control" config.

exec_control_doc

Execution_control documentation.

extended_summary

Function extended summary, with extra whitespace stripped.

parameter_help

Parameter help for the func, including execution control.

required_args

Required parameters of the input function.

template_config

A template configuration file for this function.

+
+
+property default_exec_values#
+

Default “execution_control” config.

+
+
Type:
+

dict

+
+
+
+ +
+
+property exec_control_doc#
+

Execution_control documentation.

+
+
Type:
+

str

+
+
+
+ +
+
+property required_args#
+

Required parameters of the input function.

+
+
Type:
+

set

+
+
+
+ +
+
+property template_config#
+

A template configuration file for this function.

+
+
Type:
+

dict

+
+
+
+ +
+
+property parameter_help#
+

Parameter help for the func, including execution control.

+
+
Type:
+

str

+
+
+
+ +
+
+property extended_summary#
+

Function extended summary, with extra whitespace stripped.

+
+
Type:
+

str

+
+
+
+ +
+
+config_help(command_name)[source]#
+

Generate a config help string for a command.

+
+
Parameters:
+

command_name (str) – Name of command for which the config help is being +generated.

+
+
Returns:
+

str – Help string for the config file.

+
+
+
+ +
+
+command_help(command_name)[source]#
+

Generate a help string for a command.

+
+
Parameters:
+

command_name (str) – Name of command for which the help string is being +generated.

+
+
Returns:
+

str – Help string for the command.

+
+
+
+ +
+
+param_required(param)[source]#
+

Check whether a parameter is a required input for the run function.

+
+
Parameters:
+

param (str) – Name of parameter to check.

+
+
Returns:
+

bool – True if param is a required parameter of func.

+
+
+
+ +
+ +
+ + +
diff --git a/_autosummary/gaps.cli.documentation.html b/_autosummary/gaps.cli.documentation.html
new file mode 100644
index 00000000..8436185b
--- /dev/null
+++ b/_autosummary/gaps.cli.documentation.html
@@ -0,0 +1,600 @@

gaps.cli.documentation#

+

CLI documentation utilities.

+

Classes

+ + + + + + +

CommandDocumentation(*functions[, ...])

Generate documentation for a command.

+
+ + +
diff --git a/_autosummary/gaps.cli.execution.html b/_autosummary/gaps.cli.execution.html
new file mode 100644
index 00000000..53eb7c04
--- /dev/null
+++ b/_autosummary/gaps.cli.execution.html
@@ -0,0 +1,600 @@

gaps.cli.execution#

+

GAPs execution CLI utils.

+

Functions

+ + + + + + +

kickoff_job(ctx, cmd, exec_kwargs)

Kick off a single job (a single command execution).

+
+ + +
diff --git a/_autosummary/gaps.cli.execution.kickoff_job.html b/_autosummary/gaps.cli.execution.kickoff_job.html
new file mode 100644
index 00000000..2d289b40
--- /dev/null
+++ b/_autosummary/gaps.cli.execution.kickoff_job.html
@@ -0,0 +1,648 @@

gaps.cli.execution.kickoff_job#

+
+
+kickoff_job(ctx, cmd, exec_kwargs)[source]#
+

Kick off a single job (a single command execution).

+
+
Parameters:
+
    +
  • ctx (click.Context) – Context object with a .obj attribute that contains at least the following keys:

    +
    +
    +
NAME : str

    Job name.

    +
    +
OUT_DIR : path-like

    Path to output directory.

    +
    +
COMMAND_NAME : str

    Name of command being run.

    +
    +
    +
    +
  • +
  • cmd (str) – String form of the command to kick off.

  • +
  • exec_kwargs (dict) – Keyword-value pairs to pass to the respective submit function. These will be filtered, so they may contain extra values. If some required inputs are missing from this dictionary, a gapsConfigError is raised.

  • +
+
+
Raises:
+

gapsConfigError – If exec_kwargs is missing some arguments required by the respective submit function.

+
+
+
+ +
+ + +
diff --git a/_autosummary/gaps.cli.html b/_autosummary/gaps.cli.html
new file mode 100644
index 00000000..c435fc55
--- /dev/null
+++ b/_autosummary/gaps.cli.html
@@ -0,0 +1,625 @@

gaps.cli#

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.cli.batch

Batch Job CLI entry points.

gaps.cli.cli

Generation CLI entry points.

gaps.cli.collect

GAPs collection CLI entry points.

gaps.cli.command

GAPs command configuration preprocessing functions.

gaps.cli.config

GAPs CLI command for spatially distributed function runs.

gaps.cli.documentation

CLI documentation utilities.

gaps.cli.execution

GAPs execution CLI utils.

gaps.cli.pipeline

GAPs pipeline CLI entry points.

gaps.cli.preprocessing

GAPs config preprocessing functions.

gaps.cli.status

GAPs Status Monitor

gaps.cli.templates

GAPs CLI template config generation command.

+
+ + +
diff --git a/_autosummary/gaps.cli.pipeline.html b/_autosummary/gaps.cli.pipeline.html
new file mode 100644
index 00000000..d54fbcd5
--- /dev/null
+++ b/_autosummary/gaps.cli.pipeline.html
@@ -0,0 +1,606 @@

gaps.cli.pipeline#

GAPs pipeline CLI entry points.

Functions

pipeline(ctx, config_file, cancel, monitor)
    Execute multiple steps in an analysis pipeline.
pipeline_command(template_config)
    Generate a pipeline command.
template_pipeline_config(commands)
    Generate a template pipeline config based on the commands.

gaps.cli.pipeline.pipeline#

pipeline(ctx, config_file, cancel, monitor, background=False)[source]#

Execute multiple steps in an analysis pipeline.
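For context, the pipeline config consumed by this command lists the steps to run in order. The sketch below follows the reV-style layout that GAPs is modeled on; the step names, file names, and the "logging" block are illustrative assumptions rather than text from the gaps docs:

    from gaps.config import JSONHandler

    # Hypothetical pipeline config: each entry maps a step name to its config file
    pipeline_config = {
        "logging": {"log_level": "INFO"},
        "pipeline": [
            {"run": "./config_run.json"},
            {"collect-run": "./config_collect_run.json"},
        ],
    }
    JSONHandler.write("./config_pipeline.json", pipeline_config)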

gaps.cli.pipeline.pipeline_command#

pipeline_command(template_config)[source]#

Generate a pipeline command.

gaps.cli.pipeline.template_pipeline_config#

template_pipeline_config(commands)[source]#

Generate a template pipeline config based on the commands.

gaps.cli.preprocessing#

GAPs config preprocessing functions.

Functions

preprocess_collect_config(config, project_dir, command_name[, collect_pattern])
    Pre-process collection config.
split_project_points_into_ranges(config)
    Split project points into ranges inside of config.
gaps.cli.preprocessing.preprocess_collect_config#

preprocess_collect_config(config, project_dir, command_name, collect_pattern='PIPELINE')[source]#

Pre-process collection config.

Specifically, the “collect_pattern” key is resolved into a list of 2-tuples, where the first element in each tuple is a path to the collection output file and the second element is the corresponding filepath-pattern representing the files to be collected into that output file.

Parameters:
  • config (dict) – Collection config. This config will be updated to include a “collect_pattern” key if it doesn’t already have one. If the “collect_pattern” key exists, it can be a string, a collection, or a mapping with a .items() method.
  • project_dir (path-like) – Path to project directory. This path is used to resolve the output filepath input from the user.
  • command_name (str) – Name of the command being run. This is used to parse the pipeline status for output files if collect_pattern="PIPELINE" in the input config.
  • collect_pattern (str | list | dict, optional) – Unix-style /filepath/pattern*.h5 representing the files to be collected into a single output HDF5 file. If no output file path is specified (i.e. this input is a single pattern or a list of patterns), the output file path will be inferred from the pattern itself (specifically, the wildcard will be removed and the result will be the output file path). If a list of patterns is provided, each pattern will be collected into a separate output file. To specify the name of the output file(s), set this input to a dictionary where the keys are paths to the output file (including the filename itself; relative paths are allowed) and the values are patterns representing the files that should be collected into the output file. If running a collect job as part of a pipeline, this input can be set to "PIPELINE", which will parse the output of the previous step and generate the input file pattern and output file name automatically. By default, "PIPELINE".

Returns:
  dict – Updated collection config.
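A hedged sketch of calling the preprocessor directly, outside of a pipeline run; the file paths and the "run" command name are illustrative assumptions:

    from gaps.cli.preprocessing import preprocess_collect_config

    config = {"collect_pattern": {"./outputs.h5": "./chunks/run_node*.h5"}}
    config = preprocess_collect_config(
        config,
        project_dir="/projects/my_analysis",
        command_name="run",
    )
    # "collect_pattern" is now a list of (output_file, filepath_pattern) 2-tuples
    # with the output path resolved relative to project_dir
    print(config["collect_pattern"])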

gaps.cli.preprocessing.split_project_points_into_ranges#

split_project_points_into_ranges(config)[source]#

Split project points into ranges inside of config.

Parameters:
  config (dict) – Run config. This config must have a “project_points” input that can be used to initialize ProjectPoints.

Returns:
  dict – Run config with a “project_points_split_range” key that splits the project points into multiple ranges based on the node input.
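A hedged sketch of the splitting step; the GID list is illustrative, and placing the node count under "execution_control" is an assumption based on typical GAPs-style run configs, so check your own config layout:

    from gaps.cli.preprocessing import split_project_points_into_ranges

    config = {
        "project_points": [0, 1, 2, 3, 4, 5, 6, 7],
        "execution_control": {"nodes": 2},   # assumed location of the node input
    }
    config = split_project_points_into_ranges(config)
    print(config["project_points_split_range"])  # e.g. one GID range per node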

gaps.cli.status#

GAPs Status Monitor.

Functions

main_monitor(folder, commands, status, ...)
    Run the appropriate monitor functions for a folder.
status_command()
    A status CLI command.

gaps.cli.status.main_monitor#

main_monitor(folder, commands, status, include, recursive)[source]#

Run the appropriate monitor functions for a folder.

Parameters:
  • folder (path-like) – Path to folder for which to print status.
  • commands (container of str) – Container with the commands to display.
  • status (container of str) – Container with the statuses to display.
  • include (container of str) – Container of extra keys to pull from the status files.
  • recursive (bool) – Option to perform a recursive search of directories.

gaps.cli.status.status_command#

status_command()[source]#

A status CLI command.

gaps.cli.templates#

GAPs CLI template config generation command.

Functions

template_command(template_configs)
    A template config generation CLI command.

gaps.cli.templates.template_command#

template_command(template_configs)[source]#

A template config generation CLI command.

gaps.collection.Collector#

class Collector(h5_file, collect_pattern, project_points, clobber=False)[source]#

Bases: object

Collector of multiple source files into a single output file.

Parameters:
  • h5_file (path-like) – Path to output HDF5 file into which data will be collected.
  • collect_pattern (str) – Unix-style /filepath/pattern*.h5 representing a list of input files to be collected into a single HDF5 file.
  • project_points (str | slice | list | pandas.DataFrame | None) – Project points that correspond to the full collection of points contained in the HDF5 files to be collected. None if the points list is to be ignored (i.e. collect all data in the input HDF5 files without checking that all gids are there).
  • clobber (bool, optional) – Flag to purge the output HDF5 file if it already exists. By default, False.

Methods

add_dataset(h5_file, collect_pattern, dataset_in, ...)
    Collect and add a dataset to a single HDF5 file.
collect(dataset_in[, dataset_out, ...])
    Collect a dataset from h5_dir to h5_file.
combine_meta()
    Combine meta data from input HDF5 files and write to the output file.
combine_time_index()
    Combine time_index from input HDF5 files and write to the output file.
get_dataset_shape(dataset_name)
    Extract dataset shape from the first file in the collection list.
move_chunks([sub_dir])
    Move chunked files from a directory to a sub-directory.
purge_chunks()
    Remove chunked files from a directory.

Attributes

gids
    List of gids corresponding to all sites to be combined.
h5_files
    List of paths to HDF5 files to be combined.

get_dataset_shape(dataset_name)[source]#

    Extract dataset shape from the first file in the collection list.

    Parameters:
      dataset_name (str) – Dataset to be collected whose shape is in question.
    Returns:
      shape (tuple) – Dataset shape tuple.

property h5_files#

    List of paths to HDF5 files to be combined. Type: list

property gids#

    List of gids corresponding to all sites to be combined. Type: list

combine_meta()[source]#

    Combine meta data from input HDF5 files and write to the output file.

combine_time_index()[source]#

    Combine time_index from input HDF5 files and write to the output file. If time_index is not given in the input HDF5 files, the time_index in the output file is set to None.

purge_chunks()[source]#

    Remove chunked files from a directory.

    Warns:
      gapsCollectionWarning – If some datasets have not been collected.

    Warning: this function WILL NOT delete files if any datasets were not collected.

move_chunks(sub_dir='chunk_files')[source]#

    Move chunked files from a directory to a sub-directory.

    Parameters:
      sub_dir (path-like, optional) – Sub-directory name to move chunks to. By default, “chunk_files”.

collect(dataset_in, dataset_out=None, memory_utilization_limit=0.7, pass_through=False)[source]#

    Collect a dataset from h5_dir to h5_file.

    Parameters:
      • dataset_in (str) – Name of dataset to collect. If the source shape is 2D, the time index will be collected as well.
      • dataset_out (str) – Name of dataset into which collected data is to be written. If None, the name of the output dataset is assumed to match the dataset input name. By default, None.
      • memory_utilization_limit (float) – Memory utilization limit (fractional). This sets how many sites will be collected at a time. By default, 0.7.
      • pass_through (bool) – Flag to just pass through the dataset from one of the source files, assuming all of the source files have identical copies of this dataset. By default, False.

    See also: Collector.add_dataset – Collect a dataset into an existing HDF5 file.

classmethod add_dataset(h5_file, collect_pattern, dataset_in, dataset_out=None, memory_utilization_limit=0.7, pass_through=False)[source]#

    Collect and add a dataset to a single HDF5 file.

    Parameters:
      • h5_file (path-like) – Path to output HDF5 file into which data will be collected. Note that this file must already exist and have a valid meta.
      • collect_pattern (str) – Unix-style /filepath/pattern*.h5 representing a list of input files to be collected into a single HDF5 file.
      • dataset_in (str) – Name of dataset to collect. If the source shape is 2D, the time index will be collected as well.
      • dataset_out (str) – Name of dataset into which collected data is to be written. If None, the name of the output dataset is assumed to match the dataset input name. By default, None.
      • memory_utilization_limit (float) – Memory utilization limit (fractional). This sets how many sites will be collected at a time. By default, 0.7.
      • pass_through (bool) – Flag to just pass through the dataset from one of the source files, assuming all of the source files have identical copies of this dataset. By default, False.

    See also: Collector.collect – Collect a dataset into a file that does not yet exist.
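A hedged sketch of a typical collection run follows; the file names, dataset names, and call ordering are illustrative assumptions, so adapt them to the datasets your own workflow produces:

    from gaps.collection import Collector

    collector = Collector(
        h5_file="./outputs.h5",                   # collected output file
        collect_pattern="./chunks/run_node*.h5",  # chunked files to collect
        project_points=None,                      # skip the gid completeness check
        clobber=True,                             # purge outputs.h5 if it exists
    )
    collector.combine_meta()          # write combined meta to the output file
    collector.combine_time_index()    # write combined time_index (if present)
    collector.collect("cf_profile")   # 2D dataset; time index collected as well
    collector.collect("cf_mean")      # 1D dataset
    collector.move_chunks()           # tuck chunk files into ./chunk_files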

gaps.collection.DatasetCollector#

class DatasetCollector(h5_file, source_files, gids, dataset_in, dataset_out=None, memory_utilization_limit=0.7, pass_through=False)[source]#

Bases: object

Collector for a single dataset.

Parameters:
  • h5_file (path-like) – Path to the h5_file into which the dataset is to be collected.
  • source_files (list) – List of source filepaths.
  • gids (list) – List of gids to be collected.
  • dataset_in (str) – Name of dataset to collect.
  • dataset_out (str, optional) – Name of dataset into which collected data is to be written. If None, the name of the output dataset is assumed to match the dataset input name. By default, None.
  • memory_utilization_limit (float, optional) – Memory utilization limit (fractional). This sets how many sites will be collected at a time. By default, 0.7.
  • pass_through (bool, optional) – Flag to just pass through the dataset from one of the source files, assuming all of the source files have identical copies of this dataset. By default, False.

Attributes

property gids#
    List of gids corresponding to all sites to be combined. Type: list
property duplicate_gids#
    True if there are duplicate gids being collected. Type: bool

Methods

classmethod collect_dataset(h5_file, source_files, gids, dataset_in, dataset_out=None, memory_utilization_limit=0.7, pass_through=False)[source]#
    Collect a dataset from multiple source files into a single file. Parameters are the same as for the class constructor above.
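A hedged sketch of collecting one dataset from an explicit list of source files; the file names and dataset name are illustrative, and the output file is assumed to already exist with a valid meta:

    from gaps.collection import DatasetCollector, parse_gids_from_files

    source_files = ["./chunks/run_node0.h5", "./chunks/run_node1.h5"]
    gids = parse_gids_from_files(source_files)   # gid list across all chunks
    DatasetCollector.collect_dataset(
        h5_file="./outputs.h5",
        source_files=source_files,
        gids=gids,
        dataset_in="cf_mean",
        memory_utilization_limit=0.5,   # collect fewer sites at a time
    )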

gaps.collection.find_h5_files#

find_h5_files(collect_pattern, ignore=None)[source]#

Search pattern for existing HDF5 files.

Parameters:
  • collect_pattern (str) – Unix-style /filepath/pattern*.h5 representing a list of input files to be collected into a single HDF5 file.
  • ignore (str | container | None, optional) – File name(s) to ignore. By default, None.

Returns:
  list – List of sorted filepaths.

Raises:
  gapsRuntimeError – If not all source files end in “.h5” (i.e. are not of type HDF5).
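A short, hedged usage sketch; the pattern and the ignored file name are illustrative:

    from gaps.collection import find_h5_files

    files = find_h5_files("./chunks/run_node*.h5", ignore="run_node_bad.h5")
    print(files)  # sorted list of matching .h5 filepaths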

gaps.collection#

Base class to handle collection of datasets across multiple .h5 files.

Functions

find_h5_files(collect_pattern[, ignore])
    Search pattern for existing HDF5 files.
parse_gids_from_files(h5_files)
    Extract a gid list from a list of h5_files.
parse_meta(h5_file)
    Extract and convert meta data from a rec.array to a DataFrame.
parse_project_points(project_points)
    Extract resource gids from project points.

Classes

Collector(h5_file, collect_pattern, ...[, ...])
    Collector of multiple source files into a single output file.
DatasetCollector(h5_file, source_files, ...)
    Collector for a single dataset.

gaps.collection.parse_gids_from_files#

parse_gids_from_files(h5_files)[source]#

Extract a gid list from a list of h5_files.

Parameters:
  h5_files (list) – List of h5 files to be collected.

Returns:
  gids (list) – List of resource gids to be collected.

gaps.collection.parse_meta#

parse_meta(h5_file)[source]#

Extract and convert meta data from a rec.array to a DataFrame.

Parameters:
  h5_file (path-like) – Path to HDF5 file from which meta is to be parsed.

Returns:
  meta (pd.DataFrame) – Portion of meta data corresponding to sites in h5_file.

gaps.collection.parse_project_points#

parse_project_points(project_points)[source]#

Extract resource gids from project points.

Parameters:
  project_points (str | slice | list | pandas.DataFrame) – Reference to resource points that were processed and need collecting.

Returns:
  gids (list) – List of resource gids that are to be collected.
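A hedged sketch of the parsing helpers; the file path and the point specification are illustrative:

    from gaps.collection import parse_meta, parse_project_points

    meta = parse_meta("./chunks/run_node0.h5")   # site meta as a pandas DataFrame
    gids = parse_project_points(slice(0, 100))   # resource gids 0 through 99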

gaps.config.ConfigType#

class ConfigType(value)#

Bases: _ConfigType

An enumeration of the available gaps config types.

Attributes

JSON
JSON5
YAML
YML
TOML

gaps.config.Handler#

class Handler[source]#

Bases: ABC

ABC for configuration file handler.

Methods

abstract classmethod dump(config, stream)
    Write the config to a stream (file).
abstract classmethod dumps(config)
    Convert the config to a string.
classmethod load(file_name)
    Load the file contents.
abstract classmethod loads(config_str)
    Parse the string into a config dictionary.
classmethod write(file_name, data)
    Write the data to a file.

Attributes

abstract property FILE_EXTENSION
    Enum name to use. Type: str
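To illustrate the ABC, here is a hedged sketch of what a custom handler for a new format could look like. An INI handler is not part of gaps; the class, its flat section layout, and the FILE_EXTENSION value are assumptions made purely for illustration:

    import configparser
    import io

    from gaps.config import Handler


    class INIHandler(Handler):
        """Hypothetical INI config file handler (not part of gaps)."""

        FILE_EXTENSION = "ini"  # in gaps this would map to a ConfigType enum name

        @classmethod
        def dump(cls, config, stream):
            # assumes a flat {section: {key: value}} layout for this sketch
            parser = configparser.ConfigParser()
            parser.read_dict(config)
            parser.write(stream)

        @classmethod
        def dumps(cls, config):
            stream = io.StringIO()
            cls.dump(config, stream)
            return stream.getvalue()

        @classmethod
        def loads(cls, config_str):
            parser = configparser.ConfigParser()
            parser.read_string(config_str)
            return {name: dict(section) for name, section in parser.items()}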

gaps.config.JSON5Handler#

class JSON5Handler[source]#

Bases: Handler

JSON5 config file handler.

Methods

classmethod dump(config, stream)
    Write the config to a stream (JSON5 file).
classmethod dumps(config)
    Convert the config to a JSON5 string.
classmethod load(file_name)
    Load the file contents.
classmethod loads(config_str)
    Parse the JSON5 string into a config dictionary.
classmethod write(file_name, data)
    Write the data to a file.

Attributes

FILE_EXTENSION

gaps.config.JSONHandler#

class JSONHandler[source]#

Bases: Handler

JSON config file handler.

Methods

classmethod dump(config, stream)
    Write the config to a stream (JSON file).
classmethod dumps(config)
    Convert the config to a JSON string.
classmethod load(file_name)
    Load the file contents.
classmethod loads(config_str)
    Parse the JSON string into a config dictionary.
classmethod write(file_name, data)
    Write the data to a file.

Attributes

FILE_EXTENSION
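A hedged round-trip sketch using the handler classmethods; the file name and config contents are illustrative:

    from gaps.config import JSONHandler

    config = {"project_points": [0, 1, 2], "execution_control": {"nodes": 1}}
    JSONHandler.write("./config_run.json", config)     # serialize to disk
    round_tripped = JSONHandler.load("./config_run.json")
    assert round_tripped == config
    print(JSONHandler.dumps(config))                    # JSON string form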

gaps.config.TOMLHandler#

class TOMLHandler[source]#

Bases: Handler

TOML config file handler.

Methods

classmethod dump(config, stream)
    Write the config to a stream (TOML file).
classmethod dumps(config)
    Convert the config to a TOML string.
classmethod load(file_name)
    Load the file contents.
classmethod loads(config_str)
    Parse the TOML string into a config dictionary.
classmethod write(file_name, data)
    Write the data to a file.

Attributes

FILE_EXTENSION

gaps.config.YAMLHandler#

class YAMLHandler[source]#

Bases: Handler

YAML config file handler.

Methods

classmethod dump(config, stream)
    Write the config to a stream (YAML file).
classmethod dumps(config)
    Convert the config to a YAML string.
classmethod load(file_name)
    Load the file contents.
classmethod loads(config_str)
    Parse the YAML string into a config dictionary.
classmethod write(file_name, data)
    Write the data to a file.

Attributes

FILE_EXTENSION

gaps.config.config_as_str_for_docstring#

config_as_str_for_docstring(config, config_type=ConfigType.JSON, num_spaces=12)[source]#

Convert a config into a string to be used within a docstring.

In particular, the config is serialized and extra whitespace is added after each newline.

Parameters:
  • config (dict) – Dictionary containing the configuration to be converted into docstring format.
  • config_type (ConfigType, optional) – A ConfigType enumeration value specifying what type of config file to generate. By default, ConfigType.JSON.
  • num_spaces (int, optional) – Number of spaces to add after a newline. By default, 12.

Returns:
  str – A string version of the input config, conforming to the specified config type.
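A short, hedged usage sketch; the config dict is illustrative:

    from gaps.config import ConfigType, config_as_str_for_docstring

    doc_str = config_as_str_for_docstring(
        {"log_level": "INFO", "nodes": 2},
        config_type=ConfigType.YAML,
        num_spaces=8,   # indentation added after each newline
    )
    print(doc_str)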

gaps.config#

GAPs config functions and classes.

Module attributes

ConfigType(value)
    An enumeration of the available gaps config types.

Functions

config_as_str_for_docstring(config[, ...])
    Convert a config into a string to be used within a docstring.
init_logging_from_config_file(config_file[, ...])
    Init logging, taking care of legacy rex-style kwargs.
load_config(config_filepath[, resolve_paths])
    Load a config file.
resolve_all_paths(container, base_dir)
    Perform a deep string replacement and path resolve in container.

Classes

ConfigType(value)
    An enumeration of the available gaps config types.
Handler()
    ABC for configuration file handler.
JSON5Handler()
    JSON5 config file handler.
JSONHandler()
    JSON config file handler.
TOMLHandler()
    TOML config file handler.
YAMLHandler()
    YAML config file handler.

gaps.config.init_logging_from_config_file#

init_logging_from_config_file(config_file, background=False)[source]#

Init logging, taking care of legacy rex-style kwargs.

Parameters:
  • config_file (path-like) – Path to a config file parsable as a dictionary which may or may not contain a “logging” key. If the key is not found, no further action is taken. If the key is found, the value is expected to be a dictionary of keyword-argument pairs to gaps.log.init_logger(). rex-style keys (“log_level”, “log_file”, “log_format”) are allowed. The values of these inputs are used to initialize a gaps logger.
  • background (bool, optional) – Optional flag to specify background job initialization. If True, then stream output is disabled. By default, False.
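A hedged sketch of a run config with a "logging" section that is written out and then used to initialize the gaps logger; the file name and values are illustrative:

    from gaps.config import JSONHandler, init_logging_from_config_file

    config = {
        "logging": {"log_level": "DEBUG", "log_file": "./run.log"},
        "project_points": [0, 1, 2],
    }
    JSONHandler.write("./config_run.json", config)
    init_logging_from_config_file("./config_run.json")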

  • +
+
+
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.config.load_config.html b/_autosummary/gaps.config.load_config.html new file mode 100644 index 00000000..21d42fcd --- /dev/null +++ b/_autosummary/gaps.config.load_config.html @@ -0,0 +1,631 @@ + + + + + + + + + + + gaps.config.load_config — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.config.load_config#

load_config(config_filepath, resolve_paths=True)[source]#

Load a config file.

Parameters:
  • config_filepath (path-like) – Path to config file.
  • resolve_paths (bool, optional) – Option to (recursively) resolve file-paths in the dictionary w.r.t. the config file directory. By default, True.

Returns:
  dict – Dictionary containing configuration parameters.

Raises:
  gapsValueError – If input config_filepath has no file ending.
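A short, hedged usage sketch; the file name is illustrative, and any extension supported by the gaps handlers (.json, .json5, .yaml, .yml, .toml) should work:

    from gaps.config import load_config

    config = load_config("./config_run.yaml", resolve_paths=True)
    print(config)  # dict with relative paths resolved against the config dir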

gaps.config.resolve_all_paths#

resolve_all_paths(container, base_dir)[source]#

Perform a deep string replacement and path resolve in container.

Parameters:
  • container (dict | list) – Container like a dictionary or list that may (or may not) contain relative paths to resolve.
  • base_dir (path-like) – Base path to the directory from which to resolve path strings (typically the current directory).

Returns:
  container – Input container with updated strings.
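A hedged sketch of resolving nested relative paths; the path values are illustrative:

    from gaps.config import resolve_all_paths

    config = {"resource_file": "./data/wtk_2012.h5", "nested": {"out_dir": "./out"}}
    config = resolve_all_paths(config, base_dir="/projects/my_analysis")
    # relative path strings are resolved against base_dir, e.g.
    # "/projects/my_analysis/data/wtk_2012.h5"
    print(config)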

gaps.exceptions.gapsConfigError#

exception gapsConfigError(*args, **kwargs)[source]#
    gaps ConfigError. Init exception and broadcast message to logger.

gaps.exceptions.gapsError#

exception gapsError(*args, **kwargs)[source]#
    Generic gaps Error. Init exception and broadcast message to logger.

gaps.exceptions.gapsExecutionError#

exception gapsExecutionError(*args, **kwargs)[source]#
    gaps ExecutionError. Init exception and broadcast message to logger.

gaps.exceptions.gapsHPCError#

exception gapsHPCError(*args, **kwargs)[source]#
    gaps HPCError. Init exception and broadcast message to logger.

gaps.exceptions.gapsIOError#

exception gapsIOError(*args, **kwargs)[source]#
    gaps IOError. Init exception and broadcast message to logger.

gaps.exceptions.gapsIndexError#

exception gapsIndexError(*args, **kwargs)[source]#
    gaps IndexError. Init exception and broadcast message to logger.

gaps.exceptions.gapsKeyError#

exception gapsKeyError(*args, **kwargs)[source]#
    gaps KeyError. Init exception and broadcast message to logger.

gaps.exceptions.gapsRuntimeError#

exception gapsRuntimeError(*args, **kwargs)[source]#
    gaps RuntimeError. Init exception and broadcast message to logger.

gaps.exceptions.gapsTypeError#

exception gapsTypeError(*args, **kwargs)[source]#
    gaps TypeError. Init exception and broadcast message to logger.

gaps.exceptions.gapsValueError#

exception gapsValueError(*args, **kwargs)[source]#
    gaps ValueError. Init exception and broadcast message to logger.

gaps.exceptions#

Custom Exceptions and Errors for gaps.

Exceptions

gapsConfigError(*args, **kwargs)
    gaps ConfigError.
gapsError(*args, **kwargs)
    Generic gaps Error.
gapsExecutionError(*args, **kwargs)
    gaps ExecutionError.
gapsHPCError(*args, **kwargs)
    gaps HPCError.
gapsIOError(*args, **kwargs)
    gaps IOError.
gapsIndexError(*args, **kwargs)
    gaps IndexError.
gapsKeyError(*args, **kwargs)
    gaps KeyError.
gapsRuntimeError(*args, **kwargs)
    gaps RuntimeError.
gapsTypeError(*args, **kwargs)
    gaps TypeError.
gapsValueError(*args, **kwargs)
    gaps ValueError.
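A short, hedged sketch of raising and catching one of these exceptions in user code (the config check itself is illustrative):

    from gaps.exceptions import gapsConfigError

    config = {}
    try:
        if "project_points" not in config:
            # instantiating/raising also broadcasts the message to the logger
            raise gapsConfigError("Missing required key: 'project_points'")
    except gapsConfigError as err:
        print(f"Bad config: {err}")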

gaps.hpc.COMMANDS#

class COMMANDS(SUBMIT, CANCEL)#

Bases: tuple

Create new instance of COMMANDS(SUBMIT, CANCEL).

Attributes

SUBMIT
    Alias for field number 0
CANCEL
    Alias for field number 1

Methods

count(value, /)
    Return number of occurrences of value.
index(value[, start, stop])
    Return first index of value. Raises ValueError if the value is not present.
__add__(value, /)
    Return self+value.
__mul__(value, /)
    Return self*value.

gaps.hpc.DEFAULT_STDOUT_PATH#

DEFAULT_STDOUT_PATH = './stdout'#

Default directory for .stdout and .stderr files.

gaps.hpc.HpcJobManager#

+
+
+class HpcJobManager(user=None, queue_dict=None)[source]#
+

Bases: ABC

+

Abstract HPC job manager framework

+
+
Parameters:
+
    +
  • user (str | None, optional) – HPC username. None will get your username using +getpass.getuser(). By default, None.

  • +
  • queue_dict (dict | None, optional) – Parsed HPC queue dictionary from parse_queue_str(). +None will get the queue info from the hardware. +By default, None.

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

cancel(arg)

Cancel a job.

check_status_using_job_id(job_id)

Check the status of a job using the HPC queue and job ID.

check_status_using_job_name(job_name)

Check the status of a job using the HPC queue and job name.

make_script_str(name, **kwargs)

Generate the submission script.

parse_queue_str(queue_str)

Parse the hardware queue string into a nested dictionary.

query_queue()

Run the HPC queue command and return the raw stdout string.

reset_query_cache()

Reset the query dict cache so that hardware is queried again.

submit(name[, keep_sh])

Submit a job on the HPC.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

COLUMN_HEADERS

Column header names.

COMMANDS

Command names.

MAX_NAME_LEN

Q_SUBMITTED_STATUS

String representing the submitted status for this manager.

SHELL_FILENAME_FMT

USER

queue

HPC queue keyed by job ids with values as job properties.

+
+
+classmethod parse_queue_str(queue_str)[source]#
+

Parse the hardware queue string into a nested dictionary.

+

This function parses the queue output string into a dictionary +keyed by integer job ids with values as dictionaries of job +properties (queue printout columns).

+
+
Parameters:
+

queue_str (str) – HPC queue output string. Typically a space-delimited string +with line breaks.

+
+
Returns:
+

queue_dict (dict) – HPC queue parsed into dictionary format keyed by integer job +ids with values as dictionaries of job properties (queue +printout columns).

+
+
+
+ +
+
+property queue#
+

HPC queue keyed by job ids with values as job properties.

+
+
Type:
+

dict

+
+
+
+ +
+
+reset_query_cache()[source]#
+

Reset the query dict cache so that hardware is queried again.

+
+ +
+
+check_status_using_job_id(job_id)[source]#
+

Check the status of a job using the HPC queue and job ID.

+
+
Parameters:
+

job_id (int) – Job integer ID number.

+
+
Returns:
+

status (str | None) – Queue job status string or None if not found.

+
+
+
+ +
+
+check_status_using_job_name(job_name)[source]#
+

Check the status of a job using the HPC queue and job name.

+
+
Parameters:
+

job_name (str) – Job name string.

+
+
Returns:
+

status (str | None) – Queue job status string or None if not found.

+
+
+
+ +
+
+cancel(arg)[source]#
+

Cancel a job.

+
+
Parameters:
+

arg (int | list | str) – Integer job id(s) to cancel. Can be a list of integer +job ids, ‘all’ to cancel all jobs, or a feature (-p short) +to cancel all jobs with a given feature

+
+
+
+ +
+
+submit(name, keep_sh=False, **kwargs)[source]#
+

Submit a job on the HPC.

+
+
Parameters:
+
    +
  • name (str) – HPC job name.

  • +
  • keep_sh (bool, optional) – Option to keep the submission script on disk. +By default, False.

  • +
  • **kwargs – Extra keyword-argument pairs to be passed to +make_script_str().

  • +
+
+
Returns:
+

    +
  • out (str) – Standard output from submission. If submitted successfully, +this is the Job ID.

  • +
  • err (str) – Standard error. This is an empty string if the job was +submitted successfully.

  • +
+

+
+
+
+ +
+
+abstract query_queue()[source]#
+

Run the HPC queue command and return the raw stdout string.

+
+
Returns:
+

stdout (str) – HPC queue output string that can be split into a list on line breaks.

+
+
+
+ +
+
+abstract make_script_str(name, **kwargs)[source]#
+

Generate the submission script.

+
+ +
+
+abstract property COLUMN_HEADERS#
+

Column header names.

+
+
Type:
+

namedtuple

+
+
+
+ +
+
+abstract property COMMANDS#
+

Command names.

+
+
Type:
+

namedtuple

+
+
+
+ +
+
+abstract property Q_SUBMITTED_STATUS#
+

String representing the submitted status for this manager.

+
+
Type:
+

str

+
+
+
+ +
+ +
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.PBS.html b/_autosummary/gaps.hpc.PBS.html new file mode 100644 index 00000000..cdd91c96 --- /dev/null +++ b/_autosummary/gaps.hpc.PBS.html @@ -0,0 +1,845 @@ + + + + + + + + + + + gaps.hpc.PBS — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.PBS#

+
+
+class PBS(user=None, queue_dict=None)[source]#
+

Bases: HpcJobManager

+

Subclass for PBS subprocess jobs.

+
+
Parameters:
+
    +
  • user (str | None, optional) – HPC username. None will get your username using +getpass.getuser(). By default, None.

  • +
  • queue_dict (dict | None, optional) – Parsed HPC queue dictionary from parse_queue_str(). +None will get the queue info from the hardware. +By default, None.

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

cancel(arg)

Cancel a job.

check_status_using_job_id(job_id)

Check the status of a job using the HPC queue and job ID.

check_status_using_job_name(job_name)

Check the status of a job using the HPC queue and job name.

make_script_str(name, cmd, allocation, walltime)

Generate the PBS submission script.

parse_queue_str(queue_str)

Parse the hardware queue string into a nested dictionary.

query_queue()

Run the PBS qstat command and return the raw stdout string.

reset_query_cache()

Reset the query dict cache so that hardware is queried again.

submit(name[, keep_sh])

Submit a job on the HPC.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

COLUMN_HEADERS

COMMANDS

MAX_NAME_LEN

Q_SUBMITTED_STATUS

SHELL_FILENAME_FMT

USER

queue

HPC queue keyed by job ids with values as job properties.

+
+
+query_queue()[source]#
+

Run the PBS qstat command and return the raw stdout string.

+
+
Returns:
+

stdout (str) – qstat output string. Can be split on line breaks to get a list.

+
+
+
+ +
+
+make_script_str(name, cmd, allocation, walltime, qos=None, memory=None, queue=None, feature=None, stdout_path='./stdout', conda_env=None, sh_script=None)[source]#
+

Generate the PBS submission script.

+
+
Parameters:
+
    +
  • name (str) – PBS job name.

  • +
  • cmd (str) –

    +
    +
    Command to be submitted in PBS shell script. Example:

    ‘python -m reV.generation.cli_gen’

    +
    +
    +
  • +
  • allocation (str) – HPC allocation account. Example: ‘rev’.

  • +
  • walltime (int | float) – Node walltime request in hours. Example: 4.

  • +
  • qos (str, optional) – Quality of service string for job. By default, None.

  • +
  • memory (int, optional) – Node memory request in GB. By default, None.

  • +
  • queue (str, optional) – HPC queue to submit job to. Examples include: ‘debug’, ‘short’, ‘batch’, ‘batch-h’, ‘long’, etc. By default, None, which uses test_queue.

  • +
  • feature (str, optional) – PBS feature request (-l {feature}). Example: ‘feature=24core’. Do not use this input for QOS. Use the qos arg instead. By default, None.

  • +
  • stdout_path (str, optional) – Path to print .stdout and .stderr files. +By default, DEFAULT_STDOUT_PATH.

  • +
  • conda_env (str, optional) – Conda environment to activate. By default, None.

  • +
  • sh_script (str, optional) – Script to run before executing command. By default, None.

  • +
+
+
Returns:
+

str – PBS script to submit.

+
+
+
+ +
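A usage sketch; the allocation, queue, environment, and command below are placeholders for whatever your PBS cluster actually provides:

    from gaps.hpc import PBS

    pbs = PBS()
    script = pbs.make_script_str(
        name="my_job",
        cmd="python -m reV.generation.cli_gen",  # example command from above
        allocation="rev",                        # placeholder allocation
        walltime=4,
        queue="batch",
        conda_env="my-env",
    )
    print(script)  # inspect the qsub script before submitting

    # submit() builds the same script internally and hands it to PBS:
    out, err = pbs.submit(
        name="my_job",
        cmd="python -m reV.generation.cli_gen",
        allocation="rev",
        walltime=4,
        queue="batch",
    )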
+
+cancel(arg)#
+

Cancel a job.

+
+
Parameters:
+

arg (int | list | str) – Integer job id(s) to cancel. Can be a list of integer +job ids, ‘all’ to cancel all jobs, or a feature (-p short) +to cancel all jobs with a given feature

+
+
+
+ +
+
+check_status_using_job_id(job_id)#
+

Check the status of a job using the HPC queue and job ID.

+
+
Parameters:
+

job_id (int) – Job integer ID number.

+
+
Returns:
+

status (str | None) – Queue job status string or None if not found.

+
+
+
+ +
+
+check_status_using_job_name(job_name)#
+

Check the status of a job using the HPC queue and job name.

+
+
Parameters:
+

job_name (str) – Job name string.

+
+
Returns:
+

status (str | None) – Queue job status string or None if not found.

+
+
+
+ +
+
+classmethod parse_queue_str(queue_str)#
+

Parse the hardware queue string into a nested dictionary.

+

This function parses the queue output string into a dictionary +keyed by integer job ids with values as dictionaries of job +properties (queue printout columns).

+
+
Parameters:
+

queue_str (str) – HPC queue output string. Typically a space-delimited string +with line breaks.

+
+
Returns:
+

queue_dict (dict) – HPC queue parsed into dictionary format keyed by integer job +ids with values as dictionaries of job properties (queue +printout columns).

+
+
+
+ +
+
+property queue#
+

HPC queue keyed by job ids with values as job properties.

+
+
Type:
+

dict

+
+
+
+ +
+
+reset_query_cache()#
+

Reset the query dict cache so that hardware is queried again.

+
+ +
+
+submit(name, keep_sh=False, **kwargs)#
+

Submit a job on the HPC.

+
+
Parameters:
+
    +
  • name (str) – HPC job name.

  • +
  • keep_sh (bool, optional) – Option to keep the submission script on disk. +By default, False.

  • +
  • **kwargs – Extra keyword-argument pairs to be passed to +make_script_str().

  • +
+
+
Returns:
+

    +
  • out (str) – Standard output from submission. If submitted successfully, +this is the Job ID.

  • +
  • err (str) – Standard error. This is an empty string if the job was +submitted successfully.

  • +
+

+
+
+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.Q_COLUMNS.html b/_autosummary/gaps.hpc.Q_COLUMNS.html new file mode 100644 index 00000000..90d11623 --- /dev/null +++ b/_autosummary/gaps.hpc.Q_COLUMNS.html @@ -0,0 +1,693 @@ + + + + + + + + + + + gaps.hpc.Q_COLUMNS — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.Q_COLUMNS#

+
+
+class Q_COLUMNS(NAME, ID, STATUS)#
+

Bases: tuple

+

Create new instance of Q_COLUMNS(NAME, ID, STATUS)

+

Methods

+ + + + + + + + + +

count(value, /)

Return number of occurrences of value.

index(value[, start, stop])

Return first index of value.

+

Attributes

+ + + + + + + + + + + + +

ID

Alias for field number 1

NAME

Alias for field number 0

STATUS

Alias for field number 2

+
+
+ID#
+

Alias for field number 1

+
+ +
+
+NAME#
+

Alias for field number 0

+
+ +
+
+STATUS#
+

Alias for field number 2

+
+ +
+
+__add__(value, /)#
+

Return self+value.

+
+ +
+
+__mul__(value, /)#
+

Return self*value.

+
+ +
+
+count(value, /)#
+

Return number of occurrences of value.

+
+ +
+
+index(value, start=0, stop=9223372036854775807, /)#
+

Return first index of value.

+

Raises ValueError if the value is not present.

+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.SLURM.html b/_autosummary/gaps.hpc.SLURM.html new file mode 100644 index 00000000..a08b684d --- /dev/null +++ b/_autosummary/gaps.hpc.SLURM.html @@ -0,0 +1,845 @@ + + + + + + + + + + + gaps.hpc.SLURM — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.SLURM#

+
+
+class SLURM(user=None, queue_dict=None)[source]#
+

Bases: HpcJobManager

+

Subclass for SLURM subprocess jobs.

+
+
Parameters:
+
    +
  • user (str | None, optional) – HPC username. None will get your username using +getpass.getuser(). By default, None.

  • +
  • queue_dict (dict | None, optional) – Parsed HPC queue dictionary from parse_queue_str(). +None will get the queue info from the hardware. +By default, None.

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

cancel(arg)

Cancel a job.

check_status_using_job_id(job_id)

Check the status of a job using the HPC queue and job ID.

check_status_using_job_name(job_name)

Check the status of a job using the HPC queue and job name.

make_script_str(name, cmd, allocation, walltime)

Generate the SLURM submission script.

parse_queue_str(queue_str)

Parse the hardware queue string into a nested dictionary.

query_queue()

Run the HPC queue command and return the raw stdout string.

reset_query_cache()

Reset the query dict cache so that hardware is queried again.

submit(name[, keep_sh])

Submit a job on the HPC.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

COLUMN_HEADERS

COMMANDS

MAX_NAME_LEN

Q_SUBMITTED_STATUS

SHELL_FILENAME_FMT

USER

queue

HPC queue keyed by job ids with values as job properties.

+
+
+query_queue()[source]#
+

Run the HPC queue command and return the raw stdout string.

+
+
Returns:
+

stdout (str) – HPC queue output string. Can be split on line breaks to get +a list.

+
+
+
+ +
+
+make_script_str(name, cmd, allocation, walltime, qos='normal', memory=None, feature=None, stdout_path='./stdout', conda_env=None, sh_script=None)[source]#
+

Generate the SLURM submission script.

+
+
Parameters:
+
    +
  • name (str) – SLURM job name.

  • +
  • cmd (str) –

    +
    +
    Command to be submitted in SLURM shell script. Example:

    ‘python -m reV.generation.cli_gen’

    +
    +
    +
  • +
  • allocation (str) – HPC allocation account. Example: ‘rev’.

  • +
  • walltime (int | float) – Node walltime request in hours. Example: 4.

  • +
  • qos ({“normal”, “high”}) – Quality of service specification for job. Jobs with “high” +priority will be charged at 2x the rate. By default, +"normal".

  • +
  • memory (int, optional) – Node memory request in GB. By default, None.

  • +
  • feature (str, optional) – Additional flags for SLURM job. Format is “--partition=debug” or “--depend=[state:job_id]”. Do not use this input to specify QOS. Use the qos input instead. By default, None.

  • +
  • stdout_path (str, optional) – Path to print .stdout and .stderr files. +By default, DEFAULT_STDOUT_PATH.

  • +
  • conda_env (str, optional) – Conda environment to activate. By default, None.

  • +
  • sh_script (str, optional) – Script to run before executing command. By default, None.

  • +
+
+
Returns:
+

str – SLURM script to submit.

+
+
+
+ +
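A usage sketch; the allocation, environment, and command below are placeholders for whatever your SLURM cluster actually provides:

    from gaps.hpc import SLURM

    slurm = SLURM()
    script = slurm.make_script_str(
        name="my_job",
        cmd="python -m reV.generation.cli_gen",  # example command from above
        allocation="rev",                        # placeholder allocation
        walltime=4,
        memory=90,
        conda_env="my-env",
    )
    print(script)  # inspect the sbatch script before submitting

    # Or let submit() generate the script and call sbatch directly:
    out, err = slurm.submit(
        name="my_job",
        cmd="python -m reV.generation.cli_gen",
        allocation="rev",
        walltime=4,
    )
    if not err:
        print(f"Submitted job {out}")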
+
+cancel(arg)#
+

Cancel a job.

+
+
Parameters:
+

arg (int | list | str) – Integer job id(s) to cancel. Can be a list of integer +job ids, ‘all’ to cancel all jobs, or a feature (-p short) +to cancel all jobs with a given feature

+
+
+
+ +
+
+check_status_using_job_id(job_id)#
+

Check the status of a job using the HPC queue and job ID.

+
+
Parameters:
+

job_id (int) – Job integer ID number.

+
+
Returns:
+

status (str | None) – Queue job status string or None if not found.

+
+
+
+ +
+
+check_status_using_job_name(job_name)#
+

Check the status of a job using the HPC queue and job name.

+
+
Parameters:
+

job_name (str) – Job name string.

+
+
Returns:
+

status (str | None) – Queue job status string or None if not found.

+
+
+
+ +
+
+classmethod parse_queue_str(queue_str)#
+

Parse the hardware queue string into a nested dictionary.

+

This function parses the queue output string into a dictionary +keyed by integer job ids with values as dictionaries of job +properties (queue printout columns).

+
+
Parameters:
+

queue_str (str) – HPC queue output string. Typically a space-delimited string +with line breaks.

+
+
Returns:
+

queue_dict (dict) – HPC queue parsed into dictionary format keyed by integer job +ids with values as dictionaries of job properties (queue +printout columns).

+
+
+
+ +
+
+property queue#
+

HPC queue keyed by job ids with values as job properties.

+
+
Type:
+

dict

+
+
+
+ +
+
+reset_query_cache()#
+

Reset the query dict cache so that hardware is queried again.

+
+ +
+
+submit(name, keep_sh=False, **kwargs)#
+

Submit a job on the HPC.

+
+
Parameters:
+
    +
  • name (str) – HPC job name.

  • +
  • keep_sh (bool, optional) – Option to keep the submission script on disk. +By default, False.

  • +
  • **kwargs – Extra keyword-argument pairs to be passed to +make_script_str().

  • +
+
+
Returns:
+

    +
  • out (str) – Standard output from submission. If submitted successfully, +this is the Job ID.

  • +
  • err (str) – Standard error. This is an empty string if the job was +submitted successfully.

  • +
+

+
+
+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.format_env.html b/_autosummary/gaps.hpc.format_env.html new file mode 100644 index 00000000..340e0723 --- /dev/null +++ b/_autosummary/gaps.hpc.format_env.html @@ -0,0 +1,624 @@ + + + + + + + + + + + gaps.hpc.format_env — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.format_env#

+
+
+format_env(conda_env=None)[source]#
+

Get special sbatch request strings for SLURM conda environments.

+
+
Parameters:
+

conda_env (str, optional) – Conda environment to activate. By default, None, which returns +an empty string.

+
+
Returns:
+

env_str (str) – SBATCH shell script source activate environment request string.

+
+
+
+ +
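A quick sketch (the environment name is a placeholder):

    from gaps.hpc import format_env

    print(format_env())          # empty string when no conda env is requested
    print(format_env("my-env"))  # "source activate"-style request for the script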
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.format_walltime.html b/_autosummary/gaps.hpc.format_walltime.html new file mode 100644 index 00000000..306d1b00 --- /dev/null +++ b/_autosummary/gaps.hpc.format_walltime.html @@ -0,0 +1,624 @@ + + + + + + + + + + + gaps.hpc.format_walltime — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.format_walltime#

+
+
+format_walltime(hours=None)[source]#
+

Get the SLURM walltime string in format “HH:MM:SS”

+
+
Parameters:
+

hours (float | int, optional) – Requested number of job hours. By default, None, which returns +an empty string.

+
+
Returns:
+

walltime (str) – SLURM walltime request in format “#SBATCH --time=HH:MM:SS”

+
+
+
+ +
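A quick sketch; per the docstring above, 4.5 hours should yield a “#SBATCH --time=04:30:00”-style request:

    from gaps.hpc import format_walltime

    print(format_walltime())     # empty string when no hours are requested
    print(format_walltime(4.5))  # walltime request string for the sbatch script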
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.html b/_autosummary/gaps.hpc.html new file mode 100644 index 00000000..602093b8 --- /dev/null +++ b/_autosummary/gaps.hpc.html @@ -0,0 +1,634 @@ + + + + + + + + + + + gaps.hpc — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc#

+

HPC Execution utilities.

+

Module attributes

+ + + + + + +

DEFAULT_STDOUT_PATH

Default directory for .stdout and .stderr files.

+

Functions

+ + + + + + + + + + + + + + + +

format_env([conda_env])

Get special sbatch request strings for SLURM conda environments.

format_walltime([hours])

Get the SLURM walltime string in format "HH:MM:SS"

make_sh(fname, script)

Make a shell script (.sh file) to execute a subprocess.

submit(cmd[, background, background_stdout])

Open a subprocess and submit a command.

+

Classes

+ + + + + + + + + + + + + + + + + + +

COMMANDS(SUBMIT, CANCEL)

Create new instance of COMMANDS(SUBMIT, CANCEL)

HpcJobManager([user, queue_dict])

Abstract HPC job manager framework

PBS([user, queue_dict])

Subclass for PBS subprocess jobs.

Q_COLUMNS(NAME, ID, STATUS)

Create new instance of Q_COLUMNS(NAME, ID, STATUS)

SLURM([user, queue_dict])

Subclass for SLURM subprocess jobs.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.make_sh.html b/_autosummary/gaps.hpc.make_sh.html new file mode 100644 index 00000000..8a1fd6dc --- /dev/null +++ b/_autosummary/gaps.hpc.make_sh.html @@ -0,0 +1,623 @@ + + + + + + + + + + + gaps.hpc.make_sh — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.make_sh#

+
+
+make_sh(fname, script)[source]#
+

Make a shell script (.sh file) to execute a subprocess.

+
+
Parameters:
+
    +
  • fname (str) – Name of the .sh file to create.

  • +
  • script (str) – Contents to be written into the .sh file.

  • +
+
+
+
+ +
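For example, pairing make_sh() with submit() (file name and contents are arbitrary):

    from gaps.hpc import make_sh, submit

    make_sh("run_hello.sh", "#!/bin/bash\necho hello")  # write the script
    stdout, stderr = submit("bash run_hello.sh")         # run it
    print(stdout)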
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.hpc.submit.html b/_autosummary/gaps.hpc.submit.html new file mode 100644 index 00000000..58c787e3 --- /dev/null +++ b/_autosummary/gaps.hpc.submit.html @@ -0,0 +1,636 @@ + + + + + + + + + + + gaps.hpc.submit — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.hpc.submit#

+
+
+submit(cmd, background=False, background_stdout=False)[source]#
+

Open a subprocess and submit a command.

+
+
Parameters:
+
    +
  • cmd (str) – Command to be submitted using python subprocess.

  • +
  • background (bool) – Flag to submit subprocess in the background. stdout stderr will +be empty strings if this is True.

  • +
  • background_stdout (bool) – Flag to capture the stdout/stderr from the background process +in a nohup.out file.

  • +
+
+
Returns:
+

    +
  • stdout (str) – Subprocess standard output. This is decoded from the subprocess +stdout with rstrip.

  • +
  • stderr (str) – Subprocess standard error. This is decoded from the subprocess +stderr with rstrip. After decoding/rstrip, this will be empty if +the subprocess doesn’t return an error.

  • +
+

+
+
+
+ +
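A short sketch of the foreground and background forms (the commands are placeholders):

    from gaps.hpc import submit

    # Foreground call: blocks until the command finishes
    stdout, stderr = submit("echo hello")

    # Background call: returns immediately; stdout/stderr come back as empty
    # strings unless background_stdout routes them to a nohup.out file
    submit("python my_long_script.py", background=True, background_stdout=True)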
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.html b/_autosummary/gaps.html new file mode 100644 index 00000000..bd4317f0 --- /dev/null +++ b/_autosummary/gaps.html @@ -0,0 +1,631 @@ + + + + + + + + + + + gaps — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps#

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.batch

GAPs batching framework for parametric runs.

gaps.cli

gaps.collection

Base class to handle collection of datasets across multiple .h5 files

gaps.config

GAPs config functions and classes.

gaps.exceptions

Custom Exceptions and Errors for gaps.

gaps.hpc

HPC Execution utilities.

gaps.legacy

Legacy reV-like status manager.

gaps.log

Logging utilities.

gaps.pipeline

GAPs Pipeline architecture.

gaps.project_points

GAPs Project Points

gaps.status

gaps Job status manager.

gaps.utilities

GAPs utilities.

gaps.version

GAPs Version Number.

gaps.warnings

Custom Warning for GAPs.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.legacy.BatchJob.html b/_autosummary/gaps.legacy.BatchJob.html new file mode 100644 index 00000000..137a7a93 --- /dev/null +++ b/_autosummary/gaps.legacy.BatchJob.html @@ -0,0 +1,794 @@ + + + + + + + + + + + gaps.legacy.BatchJob — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.legacy.BatchJob#

+
+
+class BatchJob(config)[source]#
+

Bases: BatchJob

+

Legacy reV-like batch job framework.

+

To use this class, simply override the following two attributes:

+
+
+
PIPELINE_CLASSPipeline

Pipeline class with at least two class methods: run and cancel_all. The run method must take pipeline_config, monitor, and verbose as arguments.

+
+
PIPELINE_BACKGROUND_METHODcallable

Callable to run pipeline in the background with monitoring. +If you set this as a static method (e.g. +PIPELINE_BACKGROUND_METHOD = staticmethod(my_callable)), +then this function should take exactly two arguments: +pipeline_config and verbose. Otherwise, the function +should take three arguments, where the first is a reference +to this batch class, and the last two are the same as above.

+
+
+
+
+
Parameters:
+

config (str) – File path to config json or csv (str).

+
+
+
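A rough sketch of such a subclass; MyPipeline and the background function are hypothetical stand-ins for objects your own package would supply:

    from gaps.legacy import BatchJob

    class MyPipeline:
        """Stand-in pipeline with the interface described above."""

        @classmethod
        def run(cls, pipeline_config, monitor, verbose):
            ...

        @classmethod
        def cancel_all(cls, pipeline_config):
            ...

    def run_pipeline_in_background(pipeline_config, verbose=False):
        """Stand-in for launching one pipeline with background monitoring."""

    class MyBatchJob(BatchJob):
        PIPELINE_CLASS = MyPipeline
        PIPELINE_BACKGROUND_METHOD = staticmethod(run_pipeline_in_background)

    # Typical CLI-style invocation (config path is a placeholder):
    # MyBatchJob.run("./config_batch.json")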

Methods

+ + + + + + + + + + + + + + + + + + +

cancel()

Cancel all pipeline modules for all batch jobs.

cancel_all(config[, verbose])

Cancel all reV pipeline modules for all batch jobs.

delete()

Clear all of the batch sub job folders.

delete_all(config[, verbose])

Delete all reV batch sub job folders based on the job summary csv in the batch config directory.

run(config[, dry_run, delete, ...])

Run the reV batch job from a config file.

+

Attributes

+ + + + + + + + + + + + + + + +

PIPELINE_BACKGROUND_METHOD

Callable used to run the pipeline in the background with monitoring.

PIPELINE_CLASS

Pipeline class providing the run and cancel_all class methods.

job_table

Batch job summary table.

sub_dirs

Job sub directory paths.

+
+
+abstract property PIPELINE_CLASS#
+

Pipeline class providing the run and cancel_all class methods described above.

+
+
Type:
+

Pipeline

+
+
+
+ +
+
+abstract property PIPELINE_BACKGROUND_METHOD#
+

Callable used to run the pipeline in the background with monitoring.

+
+
Type:
+

callable

+
+
+
+ +
+
+classmethod cancel_all(config, verbose=False)[source]#
+

Cancel all reV pipeline modules for all batch jobs.

+
+
Parameters:
+
    +
  • config (str) – File path to batch config json or csv (str).

  • +
  • verbose (bool) – Flag to turn on debug logging.

  • +
+
+
+
+ +
+
+classmethod delete_all(config, verbose=False)[source]#
+

Delete all reV batch sub job folders based on the job summary csv +in the batch config directory.

+
+
Parameters:
+
    +
  • config (str) – File path to batch config json or csv (str).

  • +
  • verbose (bool) – Flag to turn on debug logging.

  • +
+
+
+
+ +
+
+classmethod run(config, dry_run=False, delete=False, monitor_background=False, verbose=False)[source]#
+

Run the reV batch job from a config file.

+
+
Parameters:
+
    +
  • config (str) – File path to config json or csv (str).

  • +
  • dry_run (bool) – Flag to make job directories without running.

  • +
  • delete (bool) – Flag to delete all batch job sub directories based on the job +summary csv in the batch config directory.

  • +
  • monitor_background (bool) – Flag to monitor all batch pipelines continuously +in the background using the nohup command. Note that the +stdout/stderr will not be captured, but you can set a +pipeline “log_file” to capture logs.

  • +
  • verbose (bool) – Flag to turn on debug logging for the pipelines.

  • +
+
+
+
+ +
+
+cancel()[source]#
+

Cancel all pipeline modules for all batch jobs.

+
+ +
+
+delete()[source]#
+

Clear all of the batch sub job folders.

+

Only the batch sub folders listed in the job summary csv file +in the batch config directory are deleted.

+
+ +
+
+property job_table#
+

Batch job summary table.

+
+
Type:
+

pd.DataFrame

+
+
+
+ +
+
+property sub_dirs#
+

Job sub directory paths.

+
+
Type:
+

list

+
+
+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.legacy.HardwareStatusRetriever.html b/_autosummary/gaps.legacy.HardwareStatusRetriever.html new file mode 100644 index 00000000..5efdcc57 --- /dev/null +++ b/_autosummary/gaps.legacy.HardwareStatusRetriever.html @@ -0,0 +1,633 @@ + + + + + + + + + + + gaps.legacy.HardwareStatusRetriever — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.legacy.HardwareStatusRetriever#

+
+
+class HardwareStatusRetriever(hardware='slurm', subprocess_manager=None)[source]#
+

Bases: HardwareStatusRetriever

+

Query hardware for job status.

+

Initialize HardwareStatusRetriever.

+
+
Parameters:
+
    +
  • hardware (str) – Name of hardware that a pipeline is being run on: eagle, +slurm, local. Defaults to “slurm”.

  • +
  • subprocess_manager (PBS | SLURM | None, optional) – Optional initialized subprocess manager to use to check job +statuses. This can be input with cached queue data to avoid +constantly querying the HPC. By default, None.

  • +
+
+
+

Methods

+ + + +
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.legacy.Pipeline.html b/_autosummary/gaps.legacy.Pipeline.html new file mode 100644 index 00000000..d625a0a5 --- /dev/null +++ b/_autosummary/gaps.legacy.Pipeline.html @@ -0,0 +1,766 @@ + + + + + + + + + + + gaps.legacy.Pipeline — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.legacy.Pipeline#

+
+
+class Pipeline(pipeline, monitor=True, verbose=False)[source]#
+

Bases: Pipeline

+

Legacy reV-like pipeline execution framework.

+

When subclassing this object, you MUST set the following +properties in the __init__ method:

+
+
monitorbool

Flag to perform continuous monitoring of the pipeline.

+
+
verbosebool

Flag to submit pipeline steps with “-v” flag for debug +logging.

+
+
_configobject

reV-style config object. The config object MUST have the +following attributes:

+
+
namestr

Job name (typically defaults to the output directory +name).

+
+
diroutstr

Output file directory (typically the same directory +that contains the config file).

+
+
hardwarestr

Name of hardware that the pipeline is being run on +(typically “eagle”).

+
+
+
+
_run_listlist

List of dictionaries, each with a single key-value pair, +where the key represents the command and the value +represents the command config filepath to substitute into +the CMD_BASE string.

+
+
+

You must also call _init_status() in the initializer. +If you want logging outputs during the submit step, make sure +to init the “gaps” logger.

+
+
Parameters:
+
    +
  • pipeline (str | dict) – Pipeline config file path or dictionary.

  • +
  • monitor (bool, optional) – Flag to perform continuous monitoring of the pipeline. +By default, True.

  • +
  • verbose (bool, optional) – Flag to submit pipeline steps with “-v” flag for debug +logging. By default, False.

  • +
+
+
+
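A rough sketch of such a subclass, assuming a hypothetical reV-style config object; the CLI string, command names, and MyConfig below are illustrative assumptions, not part of gaps:

    from gaps.legacy import Pipeline

    class MyConfig:
        """Stand-in config object with the attributes listed above."""

        def __init__(self, fpath):
            self.name = "my_run"
            self.dirout = "./my_run"
            self.hardware = "eagle"
            # one dict per step: {command: config filepath}
            self.pipeline = [{"generation": "./config_gen.json"}]

    class MyPipeline(Pipeline):
        CMD_BASE = "python -m mytool.cli -c {fp_config} {command}"  # assumed fields
        COMMANDS = ["generation", "collect"]

        def __init__(self, pipeline, monitor=True, verbose=False):
            self.monitor = monitor
            self.verbose = verbose
            self._config = MyConfig(pipeline)
            self._run_list = self._config.pipeline
            self._init_status()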

Methods

+ + + + + + + + + +

cancel_all(pipeline)

Cancel all jobs corresponding to pipeline.

run(pipeline[, monitor, verbose])

Run the reV-style pipeline.

+

Attributes

+ + + + + + + + + + + + + + + +

CMD_BASE

Formattable string of the base pipeline CLI command.

COMMANDS

List of pipeline command names (as str).

name

Name of the pipeline job (directory of status file).

status

A gaps pipeline status object.

+
+
+abstract property CMD_BASE#
+

Formattable string of the base pipeline CLI command.

+
+
Type:
+

str

+
+
+
+ +
+
+abstract property COMMANDS#
+

List of pipeline command names (as str).

+
+
Type:
+

list

+
+
+
+ +
+
+classmethod run(pipeline, monitor=True, verbose=False)[source]#
+

Run the reV-style pipeline.

+
+
Parameters:
+
    +
  • pipeline (str | dict) – Pipeline config file path or dictionary.

  • +
  • monitor (bool, optional) – Flag to perform continuous monitoring of the pipeline. +By default, True.

  • +
  • verbose (bool, optional) – Flag to submit pipeline steps with “-v” flag for debug +logging. By default, False.

  • +
+
+
+
+ +
+
+classmethod cancel_all(pipeline)[source]#
+

Cancel all jobs corresponding to pipeline.

+
+
Parameters:
+

pipeline (path-like) – Pipeline config file path.

+
+
+
+ +
+
+property name#
+

Name of the pipeline job (directory of status file).

+
+
Type:
+

str

+
+
+
+ +
+
+property status#
+

A gaps pipeline status object.

+
+
Type:
+

Status

+
+
+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.legacy.PipelineError.html b/_autosummary/gaps.legacy.PipelineError.html new file mode 100644 index 00000000..e790bd14 --- /dev/null +++ b/_autosummary/gaps.legacy.PipelineError.html @@ -0,0 +1,615 @@ + + + + + + + + + + + gaps.legacy.PipelineError — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.legacy.PipelineError#

+
+
+exception PipelineError[source]#
+

Error for pipeline execution failure.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.legacy.Status.html b/_autosummary/gaps.legacy.Status.html new file mode 100644 index 00000000..aab09adf --- /dev/null +++ b/_autosummary/gaps.legacy.Status.html @@ -0,0 +1,1042 @@ + + + + + + + + + + + gaps.legacy.Status — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.legacy.Status#

+
+
+class Status(status_dir)[source]#
+

Bases: Status

+

Base class for data pipeline health and status information.

+

Initialize Status.

+
+
Parameters:
+

status_dir (path-like) – Directory containing zero or more job json status files.

+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

add_job(status_dir, module, job_name[, ...])

Add a job to status json.

as_df([commands, index_name, include_cols])

Format status as pandas DataFrame.

clear()

copy()

dump()

Dump status json w/ backup file in case process gets killed.

fromkeys(iterable[, value])

get(k[,d])

items()

job_exists(status_dir, job_name[, command])

Check whether a job exists and return a bool.

keys()

make_job_file(status_dir, module, job_name, ...)

Make a json file recording the status of a single job.

make_single_job_file(status_dir, command, ...)

Make a json file recording the status of a single job.

mark_job_as_submitted(status_dir, command, ...)

Mark a job in the status json as "submitted".

parse_command_status(status_dir, command[, key])

Parse key from job status(es) from the given command.

pop(k[,d])

If key is not found, d is returned if given, otherwise KeyError is raised.

popitem()

as a 2-tuple; but raise KeyError if D is empty.

record_monitor_pid(status_dir, pid)

Make a json file recording the PID of the monitor process.

reload()

Re-load the data from disk.

retrieve_job_status(status_dir, module, job_name)

Update and retrieve job status.

setdefault(k[,d])

update([E, ]**F)

If E present and has a .keys() method, does: for k in E: D[k] = E[k] If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v In either case, this is followed by: for k, v in F.items(): D[k] = v

update_from_all_job_files([check_hardware, ...])

Update status from all single-job job status files.

update_job_status(command, job_name[, ...])

Update single-job job status from single-job job status file.

values()

+

Attributes

+ + + + + + + + + +

job_hardware

Flat list of job hardware options.

job_ids

Flat list of job ids.

+
+
+classmethod retrieve_job_status(status_dir, module, job_name, hardware='slurm', subprocess_manager=None)[source]#
+

Update and retrieve job status.

+
+
Parameters:
+
    +
  • status_dir (str) – Directory containing json status file.

  • +
  • module (str) – Module that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • hardware (str) – Name of hardware that this pipeline is being run on: eagle, +slurm, local. Defaults to “slurm”. This specifies how job +are queried for status.

  • +
  • subprocess_manager (None | SLURM) – Optional initialized subprocess manager to use to check job +statuses. This can be input with cached queue data to avoid +constantly querying the HPC.

  • +
+
+
Returns:
+

status (str | None) – Status string or None if job/module not found.

+
+
+
+ +
+
+classmethod add_job(status_dir, module, job_name, replace=False, job_attrs=None)[source]#
+

Add a job to status json.

+
+
Parameters:
+
    +
  • status_dir (str) – Directory containing json status file.

  • +
  • module (str) – Module that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • replace (bool) – Flag to force replacement of pre-existing job status.

  • +
  • job_attrs (dict) – Job attributes. Should include ‘job_id’ if running on HPC.

  • +
+
+
+
+ +
+
+static make_job_file(status_dir, module, job_name, attrs)[source]#
+

Make a json file recording the status of a single job.

+
+
Parameters:
+
    +
  • status_dir (str) – Directory to put json status file.

  • +
  • module (str) – Module that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • attrs (str) – Dictionary of job attributes that represent the job status +attributes.

  • +
+
+
+
+ +
+
+as_df(commands=None, index_name='job_name', include_cols=None)[source]#
+

Format status as pandas DataFrame.

+
+
Parameters:
+
    +
  • commands (container, optional) – A container of command names to collect. If None, all +commands in the status file are collected. +By default, None.

  • +
  • index_name (str, optional) – Name to assign to index of DataFrame. +By default, “job_name”.

  • +
+
+
Returns:
+

pd.DataFrame – Pandas DataFrame containing status information.

+
+
+
+ +
+
+clear() None.  Remove all items from D.#
+
+ +
+
+dump()[source]#
+

Dump status json w/ backup file in case process gets killed.

+
+ +
+
+get(k[, d]) D[k] if k in D, else d.  d defaults to None.#
+
+ +
+
+items() a set-like object providing a view on D's items#
+
+ +
+
+classmethod job_exists(status_dir, job_name, command=None)[source]#
+

Check whether a job exists and return a bool.

+

This method will return True if the job name is found as a +key in the dictionary under the command keyword, or any +command if a None value is passed.

+
+
Parameters:
+
    +
  • status_dir (str) – Directory containing json status file.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • command (str, optional) – CLI command that the job belongs to. By default, None, +which checks all commands for the job name.

  • +
+
+
Returns:
+

exists (bool) – True if the job exists in the status json.

+
+
+
+ +
+
+property job_hardware#
+

Flat list of job hardware options.

+
+
Type:
+

list

+
+
+
+ +
+
+property job_ids#
+

Flat list of job ids.

+
+
Type:
+

list

+
+
+
+ +
+
+keys() a set-like object providing a view on D's keys#
+
+ +
+
+static make_single_job_file(status_dir, command, job_name, attrs)[source]#
+

Make a json file recording the status of a single job.

+

This method should primarily be used by HPC nodes to mark the +status of individual jobs.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory to put json status file.

  • +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • attrs (dict) – Dictionary of job attributes that represent the job status +attributes.

  • +
+
+
+
+ +
+
+classmethod mark_job_as_submitted(status_dir, command, job_name, replace=False, job_attrs=None)[source]#
+

Mark a job in the status json as “submitted”.

+

Status json does not have to exist - it is created if missing. +If it exists, the job will only be updated if it does not +exist (i.e. not submitted), unless replace is set to True.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory containing json status file.

  • +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • replace (bool, optional) – Flag to force replacement of pre-existing job status. +By default, False.

  • +
  • job_attrs (dict, optional) – Job attributes. Should include ‘job_id’ if running on HPC. +By default, None.

  • +
+
+
+
+ +
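For example, a submission script and the job it launches might coordinate through the status directory like this; the directory, command, job names, and attribute keys are all placeholders:

    from gaps.legacy import Status

    status_dir = "./my_run"

    # At submission time, record that the job went to the queue:
    Status.mark_job_as_submitted(
        status_dir, command="generation", job_name="gen_job_0",
        job_attrs={"job_id": 12345, "hardware": "slurm"},
    )

    # On the compute node, the job records its own result when it finishes:
    Status.make_single_job_file(
        status_dir, command="generation", job_name="gen_job_0",
        attrs={"job_status": "successful"},
    )

    # Later, fold the single-job files back into the main status object:
    status = Status(status_dir).update_from_all_job_files()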
+
+classmethod parse_command_status(status_dir, command, key=StatusField.OUT_FILE)[source]#
+

Parse key from job status(es) from the given command.

+

This command DOES NOT check the HPC queue for jobs and therefore +DOES NOT update the status of previously running jobs that have +errored out of the HPC queue.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory containing the status file to parse.

  • +
  • command (str) – Target CLI command to parse.

  • +
  • key (StatusField | str, optional) – Parsing target of previous command. By default, +StatusField.OUT_FILE.

  • +
+
+
Returns:
+

list – Arguments parsed from the status file in status_dir from +the input command. This list is empty if the key is not +found in the job status, or if the command does not exist in +status.

+
+
+
+ +
+
+pop(k[, d]) v, remove specified key and return the corresponding value.#
+

If key is not found, d is returned if given, otherwise KeyError is raised.

+
+ +
+
+popitem() (k, v), remove and return some (key, value) pair#
+

as a 2-tuple; but raise KeyError if D is empty.

+
+ +
+
+static record_monitor_pid(status_dir, pid)[source]#
+

Make a json file recording the PID of the monitor process.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory to put json status file.

  • +
  • pid (int) – PID of the monitoring process.

  • +
+
+
+
+ +
+
+reload()[source]#
+

Re-load the data from disk.

+
+ +
+
+setdefault(k[, d]) D.get(k,d), also set D[k]=d if k not in D#
+
+ +
+
+update([E, ]**F) None.  Update D from mapping/iterable E and F.#
+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+
+ +
+
+update_from_all_job_files(check_hardware=False, purge=True)[source]#
+

Update status from all single-job job status files.

+

This method loads all single-job status files in the target +directory and updates the Status object with the single-job +statuses.

+
+
Parameters:
+
    +
  • check_hardware (bool, optional) – Option to check hardware status for job failures for jobs +with a “running” status. This is useful because the +“running” status may not be correctly updated if the job +terminates abnormally on the HPC. By default, False.

  • +
  • purge (bool, optional) – Option to purge the individual status files. +By default, True.

  • +
+
+
Returns:
+

Status – This instance of Status with updated job properties.

+
+
+
+ +
+
+update_job_status(command, job_name, hardware_status_retriever=None)[source]#
+

Update single-job job status from single-job job status file.

+

If the status for a given command/job name combination is not +found, the status object remains unchanged.

+
+
Parameters:
+
    +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • hardware_status_retriever (HardwareStatusRetriever, optional) – Hardware status retriever. By default, None, which creates +an instance internally.

  • +
+
+
+
+ +
+
+values() an object providing a view on D's values#
+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.legacy.html b/_autosummary/gaps.legacy.html new file mode 100644 index 00000000..18c5372c --- /dev/null +++ b/_autosummary/gaps.legacy.html @@ -0,0 +1,614 @@ + + + + + + + + + + + gaps.legacy — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.legacy#

+

Legacy reV-like status manager.

+

Classes

+ + + + + + + + + + + + + + + +

BatchJob(config)

Legacy reV-like batch job framework.

HardwareStatusRetriever([hardware, ...])

Query hardware for job status.

Pipeline(pipeline[, monitor, verbose])

Legacy reV-like pipeline execution framework.

Status(status_dir)

Base class for data pipeline health and status information.

+

Exceptions

+ + + + + + +

PipelineError

Error for pipeline execution failure.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.FORMAT.html b/_autosummary/gaps.log.FORMAT.html new file mode 100644 index 00000000..45c98aba --- /dev/null +++ b/_autosummary/gaps.log.FORMAT.html @@ -0,0 +1,615 @@ + + + + + + + + + + + gaps.log.FORMAT — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.FORMAT#

+
+
+FORMAT = 'gaps (%(levelname)s) - [%(filename)s:%(lineno)d] : %(message)s'#
+

Default format for gaps logging.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.add_handlers.html b/_autosummary/gaps.log.add_handlers.html new file mode 100644 index 00000000..8a7c64b6 --- /dev/null +++ b/_autosummary/gaps.log.add_handlers.html @@ -0,0 +1,620 @@ + + + + + + + + + + + gaps.log.add_handlers — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.add_handlers#

+
+
+add_handlers(handlers)[source]#
+

Add handlers to logger ensuring they do not already exist.

+
+
Parameters:
+

handlers (list) – Handlers to add to logger instance.

+
+
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.html b/_autosummary/gaps.log.html new file mode 100644 index 00000000..7ec1b57c --- /dev/null +++ b/_autosummary/gaps.log.html @@ -0,0 +1,631 @@ + + + + + + + + + + + gaps.log — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log#

+

Logging utilities.

+

Some of the code in this module is borrowed from rex +(NREL/rex).

+

Module attributes

+ + + + + + +

FORMAT

Default format for gaps logging.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

add_handlers(handlers)

Add handlers to logger ensuring they do not already exist.

init_logger([stream, level, file, fmt])

Initialize and setup logging instance.

log_mem([log_level])

Log the memory usage to the input logger object.

log_versions()

Log package versions.

make_handler(factory[, level, fmt])

Make a handler to add to a Logger instance.

make_log_file_handler(file_path)

Make a file handler to add to a Logger instance.

make_log_stream_handler()

Make a file handler to add to a Logger instance.

print_logging_info()

Print logger names, levels, and handlers.

print_logging_info_all_libraries()

Print logger info from all libraries.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.init_logger.html b/_autosummary/gaps.log.init_logger.html new file mode 100644 index 00000000..5790a170 --- /dev/null +++ b/_autosummary/gaps.log.init_logger.html @@ -0,0 +1,631 @@ + + + + + + + + + + + gaps.log.init_logger — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.init_logger#

+
+
+init_logger(stream=True, level='INFO', file=None, fmt='gaps (%(levelname)s) - [%(filename)s:%(lineno)d] : %(message)s')[source]#
+

Initialize and setup logging instance.

+
+
Parameters:
+
    +
  • stream (bool, optional) – Option to add a StreamHandler along with FileHandler. +By default True.

  • +
  • level (str, optional) – Level of logging to capture. If multiple handlers/log_files are +requested in a single call of this function, the specified +logging level will be applied to all requested handlers. +By default, “INFO”.

  • +
  • file (str | list, optional) – Path to file that should be used for logging. This can also be +a list of filepaths to use for multiple log files. +By default, None.

  • +
  • fmt (str, optional) – Format for loggings. By default FORMAT.

  • +
+
+
+
+ +
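For example, to log to both the console and a file (the file path is arbitrary):

    from gaps.log import init_logger

    init_logger(stream=True, level="DEBUG", file="./logs/gaps.log")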
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.log_mem.html b/_autosummary/gaps.log.log_mem.html new file mode 100644 index 00000000..cc4ffc52 --- /dev/null +++ b/_autosummary/gaps.log.log_mem.html @@ -0,0 +1,626 @@ + + + + + + + + + + + gaps.log.log_mem — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.log_mem#

+
+
+log_mem(log_level='DEBUG')[source]#
+

Log the memory usage to the input logger object.

+
+
Parameters:
+
    +
  • logger (logging.Logger) – Logger object to log memory message to.

  • +
  • log_level (str) – DEBUG or INFO for different log levels for this log message.

  • +
+
+
Returns:
+

msg (str) – Memory utilization log message string.

+
+
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.log_versions.html b/_autosummary/gaps.log.log_versions.html new file mode 100644 index 00000000..27d2aa3b --- /dev/null +++ b/_autosummary/gaps.log.log_versions.html @@ -0,0 +1,615 @@ + + + + + + + + + + + gaps.log.log_versions — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.log_versions#

+
+
+log_versions()[source]#
+

Log package versions.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.make_handler.html b/_autosummary/gaps.log.make_handler.html new file mode 100644 index 00000000..24072fd1 --- /dev/null +++ b/_autosummary/gaps.log.make_handler.html @@ -0,0 +1,629 @@ + + + + + + + + + + + gaps.log.make_handler — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.make_handler#

+
+
+make_handler(factory, level='INFO', fmt='gaps (%(levelname)s) - [%(filename)s:%(lineno)d] : %(message)s')[source]#
+

Make a handler to add to a Logger instance.

+
+
Parameters:
+
    +
  • factory (callable) – A callable function to create the default handler.

  • +
  • level (str, optional) – A string representing the logging level for the handler. +By default “INFO”.

  • +
  • fmt (str, optional) – A string representing the formatting for logging calls. +By default FORMAT.

  • +
+
+
Returns:
+

handler (logging.Handler) – Handler with the specified log level and format.

+
+
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.make_log_file_handler.html b/_autosummary/gaps.log.make_log_file_handler.html new file mode 100644 index 00000000..702b0207 --- /dev/null +++ b/_autosummary/gaps.log.make_log_file_handler.html @@ -0,0 +1,625 @@ + + + + + + + + + + + gaps.log.make_log_file_handler — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.make_log_file_handler#

+
+
+make_log_file_handler(file_path)[source]#
+

Make a file handler to add to a Logger instance.

+

If the directory structure for file_path does not exist, it is +created before initializing the FileHandler.

+
+
Parameters:
+

file_path (str, optional) – Path to the output log file.

+
+
Returns:
+

handler (logging.FileHandler) – File handler with file_path as the name.

+
+
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.make_log_stream_handler.html b/_autosummary/gaps.log.make_log_stream_handler.html new file mode 100644 index 00000000..90509abb --- /dev/null +++ b/_autosummary/gaps.log.make_log_stream_handler.html @@ -0,0 +1,620 @@ + + + + + + + + + + + gaps.log.make_log_stream_handler — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.make_log_stream_handler#

+
+
+make_log_stream_handler()[source]#
+

Make a file handler to add to a Logger instance.

+
+
Returns:
+

handler (logging.StreamHandler) – Stream handler with name “stream”.

+
+
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.print_logging_info.html b/_autosummary/gaps.log.print_logging_info.html new file mode 100644 index 00000000..5803be2e --- /dev/null +++ b/_autosummary/gaps.log.print_logging_info.html @@ -0,0 +1,615 @@ + + + + + + + + + + + gaps.log.print_logging_info — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.log.print_logging_info#

+
+
+print_logging_info()[source]#
+

Print logger names, levels, and handlers.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.log.print_logging_info_all_libraries.html b/_autosummary/gaps.log.print_logging_info_all_libraries.html new file mode 100644 index 00000000..683d03ec --- /dev/null +++ b/_autosummary/gaps.log.print_logging_info_all_libraries.html @@ -0,0 +1,619 @@ + + + + + + + + + + + gaps.log.print_logging_info_all_libraries — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + + + + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.pipeline.Pipeline.html b/_autosummary/gaps.pipeline.Pipeline.html new file mode 100644 index 00000000..98b82468 --- /dev/null +++ b/_autosummary/gaps.pipeline.Pipeline.html @@ -0,0 +1,706 @@ + + + + + + + + + + + gaps.pipeline.Pipeline — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.pipeline.Pipeline#

+
+
+class Pipeline(pipeline, monitor=True)[source]#
+

Bases: object

+

gaps pipeline execution framework.

+
+
Parameters:
+
    +
  • pipeline (path-like) – Pipeline config file path.

  • +
  • monitor (bool, optional) – Flag to perform continuous monitoring of the pipeline. +By default, True.

  • +
+
+
Raises:
+

gapsConfigError – If “pipeline” key not in config file.

+
+
+

Methods

+ + + + + + + + + +

cancel_all(pipeline)

Cancel all jobs corresponding to pipeline.

run(pipeline[, monitor])

Run the pipeline.

+

Attributes

+ + + + + + + + + + + + +

COMMANDS

name

Name of the pipeline job (directory of status file).

status

A gaps pipeline status object.

+
+
+property status#
+

A gaps pipeline status object.

+
+
Type:
+

Status

+
+
+
+ +
+
+property name#
+

Name of the pipeline job (directory of status file).

+
+
Type:
+

str

+
+
+
+ +
+
+classmethod cancel_all(pipeline)[source]#
+

Cancel all jobs corresponding to pipeline.

+
+
Parameters:
+

pipeline (path-like) – Pipeline config file path.

+
+
+
+ +
+
+classmethod run(pipeline, monitor=True)[source]#
+

Run the pipeline.

+
+
Parameters:
+
    +
  • pipeline (path-like) – Pipeline config file path.

  • +
  • monitor (bool) – Flag to perform continuous monitoring of the pipeline.

  • +
+
+
+
+ +
+ +
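A usage sketch; the config path is a placeholder and the file must contain a “pipeline” key as noted above:

    from gaps.pipeline import Pipeline

    Pipeline.run("./config_pipeline.json", monitor=True)

    # If something goes wrong, cancel any jobs the pipeline submitted:
    Pipeline.cancel_all("./config_pipeline.json")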
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.pipeline.html b/_autosummary/gaps.pipeline.html new file mode 100644 index 00000000..95646faa --- /dev/null +++ b/_autosummary/gaps.pipeline.html @@ -0,0 +1,605 @@ + + + + + + + + + + + gaps.pipeline — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.pipeline#

+

GAPs Pipeline architecture.

+

Functions

+ + + + + + +

parse_previous_status(status_dir, command[, key])

Parse key from job status(es) from the previous pipeline step.

+

Classes

+ + + + + + +

Pipeline(pipeline[, monitor])

gaps pipeline execution framework.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.pipeline.parse_previous_status.html b/_autosummary/gaps.pipeline.parse_previous_status.html new file mode 100644 index 00000000..4f7b407e --- /dev/null +++ b/_autosummary/gaps.pipeline.parse_previous_status.html @@ -0,0 +1,639 @@ + + + + + + + + + + + gaps.pipeline.parse_previous_status — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

gaps.pipeline.parse_previous_status#

+
+
+parse_previous_status(status_dir, command, key=StatusField.OUT_FILE)[source]#
+

Parse key from job status(es) from the previous pipeline step.

+

This command DOES NOT check the HPC queue for jobs and therefore +DOES NOT update the status of previously running jobs that have +errored out of the HPC queue. For best results, ensure that all +previous steps of a pipeline have finished processing before calling +this function.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory containing the status file to parse.

  • +
  • command (str) – Current CLI command (i.e. current pipeline step).

  • +
  • key (StatusField | str, optional) – Parsing target of previous command. By default, +StatusField.OUT_FILE.

  • +
+
+
Returns:
+

out (list) – Arguments parsed from the status file in status_dir from +the command preceding the input command arg. This list is +empty if the key is not found in the job status, or if +the previous step index does not exist in status.

+
+
Raises:
+

gapsKeyError – If command not in status.

+
+
+
+ +
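A usage sketch (the status directory and command names below are placeholders):

>>> from gaps.pipeline import parse_previous_status
>>> from gaps.status import StatusField
>>> # Output files recorded by the step that ran before "collect-run"
>>> out_files = parse_previous_status("./project_dir", "collect-run")
>>> # Any other status field can be requested via the key argument
>>> job_ids = parse_previous_status("./project_dir", "collect-run", key=StatusField.JOB_ID)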
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.project_points.ProjectPoints.html b/_autosummary/gaps.project_points.ProjectPoints.html new file mode 100644 index 00000000..b5bfbaf2 --- /dev/null +++ b/_autosummary/gaps.project_points.ProjectPoints.html @@ -0,0 +1,805 @@ + + + + + + + + + + + gaps.project_points.ProjectPoints — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.project_points.ProjectPoints#

+
+
+class ProjectPoints(points, **kwargs)[source]#
+

Bases: object

+

Class to manage site and SAM input configuration requests.

+

Initialize ProjectPoints.

+
+
Parameters:
+
    +
  • points (int | slice | list | tuple | str | pd.DataFrame | dict) – Slice specifying project points, string pointing to a +project points csv, or a DataFrame containing the effective +csv contents. Can also be a single integer site value.

  • +
  • **kwargs – Keyword-argument pairs to add to project points DataFrame. +The key should be the column name, and the value should +be the value to add under the column name. Values must +either be a scalar or match the length of the DataFrame +resulting from the points input.

  • +
+
+
+
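As a brief construction sketch (the GIDs and the extra column are arbitrary):

>>> from gaps.project_points import ProjectPoints
>>> # Three sites by GID, with a per-site column added via **kwargs
>>> points = ProjectPoints([0, 1, 2], capacity=[5, 10, 15])
>>> points.gids    # expected: [0, 1, 2]
>>> points.df      # DataFrame with "gid" and "capacity" columns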

Methods

+ + + + + + + + + + + + + + + + + + +

from_range(split_range, points, **kwargs)

Create a ProjectPoints instance from a range of indices.

get_sites_from_key(key, value)

Get a site list for which the key equals the value.

index(gid)

Index location (iloc not loc) for a resource gid.

join_df(df2[, key])

Join df2 to the _df attribute using _df's gid as the join key.

split([sites_per_split])

Split the project points into sub-groups by number of sites.

+

Attributes

+ + + + + + + + + + + + +

df

Project points DataFrame of site info.

gids

Gids (resource file index values) of sites.

sites_as_slice

Sites in slice format or list if non-sequential.

+
+
+property df#
+

Project points DataFrame of site info.

+
+
Type:
+

pd.DataFrame

+
+
+
+ +
+
+property gids#
+

Gids (resource file index values) of sites.

+
+
Type:
+

list

+
+
+
+ +
+
+property sites_as_slice#
+

Sites in slice format or list if non-sequential.

+
+
Type:
+

list | slice

+
+
+
+ +
+
+index(gid)[source]#
+

Index location (iloc not loc) for a resource gid.

+
+
Parameters:
+

gid (int) – Resource GID found in the project points gid column.

+
+
Returns:
+

ind (int) – Row index of gid in the project points DataFrame.

+
+
+
+ +
+
+join_df(df2, key='gid')[source]#
+

Join df2 to the _df attribute using _df’s gid as the join key.

+

This can be used to add site-specific data to the project_points, +taking advantage of the ProjectPoints iterator/split functions +such that only the relevant site data is passed to the analysis +functions.

+
+
Parameters:
+
    +
  • df2 (pd.DataFrame) – DataFrame to be joined to the df attribute (this +instance of project points DataFrame). This likely contains +site-specific inputs that are to be passed to parallel +workers.

  • +
  • key (str) – Primary key of df2 to be joined to the df attribute +(this instance of the project points DataFrame). Primary key +of the self._df attribute is fixed as the gid column.

  • +
+
+
+
+ +
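A sketch of attaching site-specific data (the column and values are illustrative):

>>> import pandas as pd
>>> from gaps.project_points import ProjectPoints
>>> points = ProjectPoints([0, 1, 2])
>>> site_data = pd.DataFrame({"gid": [0, 1, 2], "hub_height": [80, 90, 100]})
>>> points.join_df(site_data)      # joins on the "gid" column by default
>>> "hub_height" in points.df      # the new column is now part of the points DataFrame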
+
+get_sites_from_key(key, value)[source]#
+

Get a site list for which the key equals the value.

+
+
Parameters:
+
    +
  • key (str) – Name of key (column) in project points DataFrame.

  • +
  • value (int | float | str | obj) – Value to look for under the key column.

  • +
+
+
Returns:
+

sites (list of ints) – List of sites (GID values) associated with the requested key +and value. If the key or value does not exist, an empty list +is returned.

+
+
+
+ +
+
+split(sites_per_split=100)[source]#
+

Split the project points into sub-groups by number of sites.

+
+
Parameters:
+

sites_per_split (int, optional) – Number of points in each sub-group. By default, 100.

+
+
Yields:
+

ProjectPoints – A new ProjectPoints instance with up to sites_per_split +number of points.

+
+
+
+ +
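A sketch of splitting a larger set of points into chunks (the GIDs are arbitrary):

>>> from gaps.project_points import ProjectPoints
>>> points = ProjectPoints(list(range(250)))
>>> chunks = list(points.split(sites_per_split=100))
>>> [len(chunk.gids) for chunk in chunks]   # expected: [100, 100, 50]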
+
+classmethod from_range(split_range, points, **kwargs)[source]#
+

Create a ProjectPoints instance from a range of indices.

+
+
Parameters:
+
    +
  • split_range (2-tuple) – Tuple containing the start and end index (iloc, not loc). +Last index is not included.

  • +
  • points (int | slice | list | tuple | str | pd.DataFrame | dict) – Slice specifying project points, string pointing to a +project points csv, or a DataFrame containing the effective +csv contents. Can also be a single integer site value.

  • +
  • **kwargs – Keyword-argument pairs to add to project points DataFrame. +The key should be the column name, and the value should +be the value to add under the column name. Values must +either be a scalar or match the length of the DataFrame +resulting from the points input.

  • +
+
+
Returns:
+

ProjectPoints – A new ProjectPoints instance with a range sampled from the +points input according to split_range.

+
+
+
+ +
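A sketch of how a single worker might pull its slice of the full project points (the CSV path is a placeholder):

>>> from gaps.project_points import ProjectPoints
>>> # Sites at iloc positions 0 through 99 of the full project points file
>>> sub_points = ProjectPoints.from_range((0, 100), "./project_points.csv")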
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.project_points.html b/_autosummary/gaps.project_points.html new file mode 100644 index 00000000..0bd597ed --- /dev/null +++ b/_autosummary/gaps.project_points.html @@ -0,0 +1,597 @@ + + + + + + + + + + + gaps.project_points — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.project_points#

+

GAPs Project Points.

+

Classes

+ + + + + + +

ProjectPoints(points, **kwargs)

Class to manage site and SAM input configuration requests.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.HardwareOption.html b/_autosummary/gaps.status.HardwareOption.html new file mode 100644 index 00000000..4be476c1 --- /dev/null +++ b/_autosummary/gaps.status.HardwareOption.html @@ -0,0 +1,650 @@ + + + + + + + + + + + gaps.status.HardwareOption — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.HardwareOption#

+
+
+class HardwareOption(value)[source]#
+

Bases: CaseInsensitiveEnum

+

A collection of hardware options.

+

Methods

+ + + + + + +

reset_all_cached_queries()

Reset all cached hardware queries.

+

Attributes

+ + + + + + + + + + + + + + + +

LOCAL

KESTREL

EAGLE

PEREGRINE

+
+
+classmethod reset_all_cached_queries()[source]#
+

Reset all cached hardware queries.

+
+ +
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.HardwareStatusRetriever.html b/_autosummary/gaps.status.HardwareStatusRetriever.html new file mode 100644 index 00000000..5e1e5c71 --- /dev/null +++ b/_autosummary/gaps.status.HardwareStatusRetriever.html @@ -0,0 +1,629 @@ + + + + + + + + + + + gaps.status.HardwareStatusRetriever — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.HardwareStatusRetriever#

+
+
+class HardwareStatusRetriever(subprocess_manager=None)[source]#
+

Bases: object

+

Query hardware for job status.

+

Initialize HardwareStatusRetriever.

+
+
Parameters:
+

subprocess_manager (PBS | SLURM | None, optional) – Optional initialized subprocess manager to use to check job +statuses. This can be input with cached queue data to avoid +constantly querying the HPC. By default, None.

+
+
+

Methods

+ + + +
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.QOSOption.html b/_autosummary/gaps.status.QOSOption.html new file mode 100644 index 00000000..57847576 --- /dev/null +++ b/_autosummary/gaps.status.QOSOption.html @@ -0,0 +1,630 @@ + + + + + + + + + + + gaps.status.QOSOption — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.QOSOption#

+
+
+class QOSOption(value)[source]#
+

Bases: CaseInsensitiveEnum

+

A collection of job QOS options.

+

Attributes

+ + + + + + + + + + + + +

NORMAL

HIGH

UNSPECIFIED

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.Status.html b/_autosummary/gaps.status.Status.html new file mode 100644 index 00000000..d846dcb0 --- /dev/null +++ b/_autosummary/gaps.status.Status.html @@ -0,0 +1,999 @@ + + + + + + + + + + + gaps.status.Status — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.Status#

+
+
+class Status(status_dir)[source]#
+

Bases: UserDict

+

Base class for data pipeline health and status information.

+

This class facilitates the creation of individual job status files +which the main Status object can collect.

+

Initialize Status.

+
+
Parameters:
+

status_dir (path-like) – Directory containing zero or more job json status files.

+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

as_df([commands, index_name, include_cols])

Format status as pandas DataFrame.

clear()

copy()

dump()

Dump the status json with a backup file in case the process gets killed.

fromkeys(iterable[, value])

get(k[,d])

items()

job_exists(status_dir, job_name[, command])

Check whether a job exists and return a bool.

keys()

make_single_job_file(status_dir, command, ...)

Make a json file recording the status of a single job.

mark_job_as_submitted(status_dir, command, ...)

Mark a job in the status json as "submitted".

parse_command_status(status_dir, command[, key])

Parse key from job status(es) from the given command.

pop(k[,d])

If key is not found, d is returned if given, otherwise KeyError is raised.

popitem()

Remove and return some (key, value) pair as a 2-tuple; raise KeyError if D is empty.

record_monitor_pid(status_dir, pid)

Make a json file recording the PID of the monitor process.

reload()

Re-load the data from disk.

retrieve_job_status(status_dir, command, ...)

Update and retrieve job status.

setdefault(k[,d])

update([E, ]**F)

If E present and has a .keys() method, does: for k in E: D[k] = E[k] If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v In either case, this is followed by: for k, v in F.items(): D[k] = v

update_from_all_job_files([check_hardware, ...])

Update status from all single-job job status files.

update_job_status(command, job_name[, ...])

Update single-job job status from single-job job status file.

values()

+

Attributes

+ + + + + + + + + +

job_hardware

Flat list of job hardware options.

job_ids

Flat list of job ids.

+
+
+property job_ids#
+

Flat list of job ids.

+
+
Type:
+

list

+
+
+
+ +
+
+property job_hardware#
+

Flat list of job hardware options.

+
+
Type:
+

list

+
+
+
+ +
+
+as_df(commands=None, index_name='job_name', include_cols=None)[source]#
+

Format status as pandas DataFrame.

+
+
Parameters:
+
    +
  • commands (container, optional) – A container of command names to collect. If None, all +commands in the status file are collected. +By default, None.

  • +
  • index_name (str, optional) – Name to assign to index of DataFrame. +By default, “job_name”.

  • +
+
+
Returns:
+

pd.DataFrame – Pandas DataFrame containing status information.

+
+
+
+ +
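A short sketch (the status directory is a placeholder):

>>> from gaps.status import Status
>>> status = Status("./project_dir")
>>> full_table = status.as_df()                  # all commands in the status file
>>> run_table = status.as_df(commands=["run"])   # restrict to a single command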
+
+reload()[source]#
+

Re-load the data from disk.

+
+ +
+
+dump()[source]#
+

Dump the status json with a backup file in case the process gets killed.

+
+ +
+
+update_from_all_job_files(check_hardware=False, purge=True)[source]#
+

Update status from all single-job job status files.

+

This method loads all single-job status files in the target +directory and updates the Status object with the single-job +statuses.

+
+
Parameters:
+
    +
  • check_hardware (bool, optional) – Option to check hardware status for job failures for jobs +with a “running” status. This is useful because the +“running” status may not be correctly updated if the job +terminates abnormally on the HPC. By default, False.

  • +
  • purge (bool, optional) – Option to purge the individual status files. +By default, True.

  • +
+
+
Returns:
+

Status – This instance of Status with updated job properties.

+
+
+
+ +
+
+update_job_status(command, job_name, hardware_status_retriever=None)[source]#
+

Update single-job job status from single-job job status file.

+

If the status for a given command/job name combination is not +found, the status object remains unchanged.

+
+
Parameters:
+
    +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • hardware_status_retriever (HardwareStatusRetriever, optional) – Hardware status retriever. By default, None, which creates +an instance internally.

  • +
+
+
+
+ +
+
+static record_monitor_pid(status_dir, pid)[source]#
+

Make a json file recording the PID of the monitor process.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory to put json status file.

  • +
  • pid (int) – PID of the monitoring process.

  • +
+
+
+
+ +
+
+static make_single_job_file(status_dir, command, job_name, attrs)[source]#
+

Make a json file recording the status of a single job.

+

This method should primarily be used by HPC nodes to mark the +status of individual jobs.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory to put json status file.

  • +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • attrs (dict) – Dictionary of job attributes that represent the job status +attributes.

  • +
+
+
+
+ +
+
+classmethod mark_job_as_submitted(status_dir, command, job_name, replace=False, job_attrs=None)[source]#
+

Mark a job in the status json as “submitted”.

+

Status json does not have to exist - it is created if missing. +If it does exist, the job entry will only be added if it is not +already present (i.e. not yet submitted), unless replace is set to True.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory containing json status file.

  • +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • replace (bool, optional) – Flag to force replacement of pre-existing job status. +By default, False.

  • +
  • job_attrs (dict, optional) – Job attributes. Should include ‘job_id’ if running on HPC. +By default, None.

  • +
+
+
+
+ +
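A sketch of recording a submission (the directory, command, job name, and job id are placeholders):

>>> from gaps.status import Status
>>> Status.mark_job_as_submitted(
...     "./project_dir",
...     command="run",
...     job_name="run_j0",
...     job_attrs={"job_id": 12345},
... )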
+
+classmethod job_exists(status_dir, job_name, command=None)[source]#
+

Check whether a job exists and return a bool.

+

This method will return True if the job name is found as a +key in the dictionary under the command keyword, or any +command if a None value is passed.

+
+
Parameters:
+
    +
  • status_dir (str) – Directory containing json status file.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • command (str, optional) – CLI command that the job belongs to. By default, None, +which checks all commands for the job name.

  • +
+
+
Returns:
+

exists (bool) – True if the job exists in the status json.

+
+
+
+ +
+
+classmethod retrieve_job_status(status_dir, command, job_name, subprocess_manager=None)[source]#
+

Update and retrieve job status.

+
+
Parameters:
+
    +
  • status_dir (str) – Directory containing json status file.

  • +
  • command (str) – CLI command that the job belongs to.

  • +
  • job_name (str) – Unique job name identification.

  • +
  • subprocess_manager (None | SLURM, optional) – Optional initialized subprocess manager to use to check job +statuses. This can be input with cached queue data to avoid +constantly querying the HPC.

  • +
+
+
Returns:
+

status (str | None) – Status string or None if job/command not found.

+
+
+
+ +
+
+classmethod parse_command_status(status_dir, command, key=StatusField.OUT_FILE)[source]#
+

Parse key from job status(es) from the given command.

+

This command DOES NOT check the HPC queue for jobs and therefore +DOES NOT update the status of previously running jobs that have +errored out of the HPC queue.

+
+
Parameters:
+
    +
  • status_dir (path-like) – Directory containing the status file to parse.

  • +
  • command (str) – Target CLI command to parse.

  • +
  • key (StatusField | str, optional) – Parsing target of previous command. By default, +StatusField.OUT_FILE.

  • +
+
+
Returns:
+

list – Arguments parsed from the status file in status_dir from +the input command. This list is empty if the key is not +found in the job status, or if the command does not exist in +status.

+
+
+
+ +
+
+clear() None.  Remove all items from D.#
+
+ +
+
+get(k[, d]) D[k] if k in D, else d.  d defaults to None.#
+
+ +
+
+items() a set-like object providing a view on D's items#
+
+ +
+
+keys() a set-like object providing a view on D's keys#
+
+ +
+
+pop(k[, d]) v, remove specified key and return the corresponding value.#
+

If key is not found, d is returned if given, otherwise KeyError is raised.

+
+ +
+
+popitem() (k, v), remove and return some (key, value) pair#
+

as a 2-tuple; but raise KeyError if D is empty.

+
+ +
+
+setdefault(k[, d]) D.get(k,d), also set D[k]=d if k not in D#
+
+ +
+
+update([E, ]**F) None.  Update D from mapping/iterable E and F.#
+

If E present and has a .keys() method, does: for k in E: D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v

+
+ +
+
+values() an object providing a view on D's values#
+
+ +
+ +
+ + +
+ + + + + +
+ + + + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.StatusField.html b/_autosummary/gaps.status.StatusField.html new file mode 100644 index 00000000..32175c02 --- /dev/null +++ b/_autosummary/gaps.status.StatusField.html @@ -0,0 +1,657 @@ + + + + + + + + + + + gaps.status.StatusField — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.StatusField#

+
+
+class StatusField(value)[source]#
+

Bases: CaseInsensitiveEnum

+

A collection of required status fields in a status file.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

JOB_ID

JOB_STATUS

PIPELINE_INDEX

HARDWARE

QOS

OUT_FILE

TIME_SUBMITTED

TIME_START

TIME_END

TOTAL_RUNTIME

RUNTIME_SECONDS

MONITOR_PID

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.StatusOption.html b/_autosummary/gaps.status.StatusOption.html new file mode 100644 index 00000000..0f4c4c7d --- /dev/null +++ b/_autosummary/gaps.status.StatusOption.html @@ -0,0 +1,639 @@ + + + + + + + + + + + gaps.status.StatusOption — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.StatusOption#

+
+
+class StatusOption(value)[source]#
+

Bases: CaseInsensitiveEnum

+

A collection of job status options.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + +

NOT_SUBMITTED

SUBMITTED

RUNNING

SUCCESSFUL

FAILED

COMPLETE

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.StatusUpdates.html b/_autosummary/gaps.status.StatusUpdates.html new file mode 100644 index 00000000..1695ab22 --- /dev/null +++ b/_autosummary/gaps.status.StatusUpdates.html @@ -0,0 +1,642 @@ + + + + + + + + + + + gaps.status.StatusUpdates — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status.StatusUpdates#

+
+
+class StatusUpdates(directory, command, job_name, job_attrs)[source]#
+

Bases: object

+

Context manager to track run function progress.

+

When this context is entered, a status file is written for the given +command/job combination, with the given job attributes. The job +status is set to “running”, and the start time is recorded. When the +context is exited, another status file is written with the end time +and total runtime values added. The status is also set to +“successful”, unless an uncaught exception was raised during the +function runtime, in which case the status is set to “failed”. If +the out_file attribute of this context manager is set before the +context is exited, that value is also written to the status file.

+

Initialize StatusUpdates.

+
+
Parameters:
+
    +
  • directory (path-like) – Directory to write status files to.

  • +
  • command (str) – Name of the command being run.

  • +
  • job_name (str) – Name of the job being run.

  • +
  • job_attrs (dict) – A dictionary containing job attributes that should be +written to the status file.

  • +
+
+
+
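A usage sketch (the directory, names, attributes, and the run_analysis call are all placeholders):

>>> from gaps.status import StatusUpdates
>>> attrs = {"hardware": "local"}
>>> with StatusUpdates("./project_dir", "run", "run_j0", attrs) as status:
...     out_file = run_analysis()    # hypothetical analysis function
...     status.out_file = out_file   # recorded in the final status file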

Methods

+ + + +
+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.status.html b/_autosummary/gaps.status.html new file mode 100644 index 00000000..23365c46 --- /dev/null +++ b/_autosummary/gaps.status.html @@ -0,0 +1,615 @@ + + + + + + + + + + + gaps.status — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.status#

+

gaps Job status manager.

+

Classes

+ + + + + + + + + + + + + + + + + + + + + + + + +

HardwareOption(value)

A collection of hardware options.

HardwareStatusRetriever([subprocess_manager])

Query hardware for job status.

QOSOption(value)

A collection of job QOS options.

Status(status_dir)

Base class for data pipeline health and status information.

StatusField(value)

A collection of required status fields in a status file.

StatusOption(value)

A collection of job status options.

StatusUpdates(directory, command, job_name, ...)

Context manager to track run function progress.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.utilities.CaseInsensitiveEnum.html b/_autosummary/gaps.utilities.CaseInsensitiveEnum.html new file mode 100644 index 00000000..2f225074 --- /dev/null +++ b/_autosummary/gaps.utilities.CaseInsensitiveEnum.html @@ -0,0 +1,633 @@ + + + + + + + + + + + gaps.utilities.CaseInsensitiveEnum — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.utilities.CaseInsensitiveEnum#

+
+
+class CaseInsensitiveEnum(value)[source]#
+

Bases: str, Enum

+

A string enum that is case insensitive.

+

Methods

+ + + + + + +

members_as_str()

Set of enum members as strings.

+
+
+classmethod members_as_str()[source]#
+

Set of enum members as strings.

+
+ +
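For example, using one of the enums defined in gaps.status (the exact member strings depend on the enum values):

>>> from gaps.status import QOSOption
>>> QOSOption.members_as_str()   # a set such as {"normal", "high", "unspecified"}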
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.utilities.html b/_autosummary/gaps.utilities.html new file mode 100644 index 00000000..6d025ffa --- /dev/null +++ b/_autosummary/gaps.utilities.html @@ -0,0 +1,611 @@ + + + + + + + + + + + gaps.utilities — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + + + + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.utilities.project_points_from_container_or_slice.html b/_autosummary/gaps.utilities.project_points_from_container_or_slice.html new file mode 100644 index 00000000..6f906cff --- /dev/null +++ b/_autosummary/gaps.utilities.project_points_from_container_or_slice.html @@ -0,0 +1,625 @@ + + + + + + + + + + + gaps.utilities.project_points_from_container_or_slice — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.utilities.project_points_from_container_or_slice#

+
+
+project_points_from_container_or_slice(project_points)[source]#
+

Parse project point input into a list of GIDs.

+
+
Parameters:
+

project_points (numeric | container) – A number or container of numbers that holds GID values. If a +mapping (e.g. dict, pd.DataFrame, etc.), a “gid” key must map to the +desired values.

+
+
Returns:
+

list – A list of integer GID values.

+
+
+
+ +
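A few illustrative calls (expected results are shown as comments, not verified output):

>>> from gaps.utilities import project_points_from_container_or_slice
>>> project_points_from_container_or_slice([2, 3, 4])           # [2, 3, 4]
>>> project_points_from_container_or_slice(slice(0, 3))         # [0, 1, 2]
>>> project_points_from_container_or_slice({"gid": [10, 11]})   # [10, 11]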
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.utilities.recursively_update_dict.html b/_autosummary/gaps.utilities.recursively_update_dict.html new file mode 100644 index 00000000..e6bff9e4 --- /dev/null +++ b/_autosummary/gaps.utilities.recursively_update_dict.html @@ -0,0 +1,627 @@ + + + + + + + + + + + gaps.utilities.recursively_update_dict — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.utilities.recursively_update_dict#

+
+
+recursively_update_dict(existing, new)[source]#
+

Update a dictionary recursively.

+
+
Parameters:
+
    +
  • existing (dict) – Existing dictionary to update. Dictionary is copied before +recursive update is applied.

  • +
  • new (dict) – New dictionary with data to add to existing.

  • +
+
+
Returns:
+

dict – Existing dictionary with data updated from new dictionary.

+
+
+
+ +
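A small illustration of the nested-merge behavior (nested keys are expected to be merged rather than replaced wholesale):

>>> from gaps.utilities import recursively_update_dict
>>> existing = {"logging": {"log_level": "INFO", "log_file": None}}
>>> new = {"logging": {"log_level": "DEBUG"}}
>>> merged = recursively_update_dict(existing, new)
>>> merged["logging"]["log_level"]   # "DEBUG"
>>> merged["logging"]["log_file"]    # still None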
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.utilities.resolve_path.html b/_autosummary/gaps.utilities.resolve_path.html new file mode 100644 index 00000000..73bd627e --- /dev/null +++ b/_autosummary/gaps.utilities.resolve_path.html @@ -0,0 +1,639 @@ + + + + + + + + + + + gaps.utilities.resolve_path — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.utilities.resolve_path#

+
+
+resolve_path(path, base_dir)[source]#
+

Resolve a file path represented by the input string.

+

This function resolves the input string if it resembles a path. +Specifically, the string will be resolved if it starts with +“./” or “..”, or if it contains either “./” or +“..” somewhere in the string body. Otherwise, the string +is returned unchanged, so this function is safe to call on any +string, even ones that do not resemble a path. +This method delegates the “resolving” logic to +pathlib.Path.resolve(). This means the path is made +absolute, symlinks are resolved, and “..” components are +eliminated. If the path input starts with “./” or +“..”, it is assumed to be w.r.t. the config directory, not +the run directory.

+
+
Parameters:
+
    +
  • path (str) – Input file path.

  • +
  • base_dir (path-like) – Base path to directory from which to resolve path string +(typically current directory).

  • +
+
+
Returns:
+

str – The resolved path.

+
+
+
+ +
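A sketch of typical calls (the paths are placeholders):

>>> from gaps.utilities import resolve_path
>>> resolve_path("./outputs/run.h5", "/projects/my_run")   # resolved relative to the base directory
>>> resolve_path("run.h5", "/projects/my_run")             # returned unchanged (no "./" or "..")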
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.version.html b/_autosummary/gaps.version.html new file mode 100644 index 00000000..97f8863e --- /dev/null +++ b/_autosummary/gaps.version.html @@ -0,0 +1,589 @@ + + + + + + + + + + + gaps.version — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.version#

+

GAPs Version Number.

+
+ + +
+ + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.warnings.gapsCollectionWarning.html b/_autosummary/gaps.warnings.gapsCollectionWarning.html new file mode 100644 index 00000000..33574313 --- /dev/null +++ b/_autosummary/gaps.warnings.gapsCollectionWarning.html @@ -0,0 +1,616 @@ + + + + + + + + + + + gaps.warnings.gapsCollectionWarning — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.warnings.gapsCollectionWarning#

+
+
+exception gapsCollectionWarning(*args, **kwargs)[source]#
+

gaps Collection warning.

+

Init exception and broadcast message to logger.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.warnings.gapsDeprecationWarning.html b/_autosummary/gaps.warnings.gapsDeprecationWarning.html new file mode 100644 index 00000000..21f5c07a --- /dev/null +++ b/_autosummary/gaps.warnings.gapsDeprecationWarning.html @@ -0,0 +1,616 @@ + + + + + + + + + + + gaps.warnings.gapsDeprecationWarning — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.warnings.gapsDeprecationWarning#

+
+
+exception gapsDeprecationWarning(*args, **kwargs)[source]#
+

gaps deprecation warning.

+

Init exception and broadcast message to logger.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.warnings.gapsHPCWarning.html b/_autosummary/gaps.warnings.gapsHPCWarning.html new file mode 100644 index 00000000..a5291540 --- /dev/null +++ b/_autosummary/gaps.warnings.gapsHPCWarning.html @@ -0,0 +1,616 @@ + + + + + + + + + + + gaps.warnings.gapsHPCWarning — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.warnings.gapsHPCWarning#

+
+
+exception gapsHPCWarning(*args, **kwargs)[source]#
+

gaps HPC warning.

+

Init exception and broadcast message to logger.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.warnings.gapsWarning.html b/_autosummary/gaps.warnings.gapsWarning.html new file mode 100644 index 00000000..1a94f470 --- /dev/null +++ b/_autosummary/gaps.warnings.gapsWarning.html @@ -0,0 +1,606 @@ + + + + + + + + + + + gaps.warnings.gapsWarning — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +
+

gaps.warnings.gapsWarning#

+
+
+exception gapsWarning(*args, **kwargs)[source]#
+

Generic gaps Warning.

+

Init exception and broadcast message to logger.

+
+ +
+ + +
+ + + + + +
+ + + +
+ + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/gaps.warnings.html b/_autosummary/gaps.warnings.html new file mode 100644 index 00000000..fc043cdd --- /dev/null +++ b/_autosummary/gaps.warnings.html @@ -0,0 +1,606 @@ + + + + + + + + + + + gaps.warnings — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + + + + + + + +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/batch.html b/_modules/gaps/batch.html new file mode 100644 index 00000000..c2a07b00 --- /dev/null +++ b/_modules/gaps/batch.html @@ -0,0 +1,862 @@ + + + + + + + + + + gaps.batch — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +

Source code for gaps.batch

+# -*- coding: utf-8 -*-
+"""GAPs batching framework for parametric runs.
+
+Based on reV-batch.
+"""
+import os
+import copy
+import json
+import shutil
+import logging
+from warnings import warn
+from pathlib import Path
+from itertools import product
+from collections import namedtuple
+
+import pandas as pd
+
+from rex.utilities import parse_year
+
+from gaps.config import load_config, ConfigType, resolve_all_paths
+import gaps.cli.pipeline
+from gaps.pipeline import Pipeline
+from gaps.exceptions import gapsValueError, gapsConfigError
+from gaps.warnings import gapsWarning
+
+
+logger = logging.getLogger(__name__)
+
+BATCH_CSV_FN = "batch_jobs.csv"
+BatchSet = namedtuple("BatchSet", ["arg_combo", "file_set", "tag"])
+
+
+
+[docs] +class BatchJob: + """Framework for building a batched job suite. + + Based on reV-batch. + + This framework allows users to modify key-value pairs in input + configuration files based on a top-level batch config file. This + framework will create run directories for all combinations of input + parametrics and run the corresponding GAPs pipelines for each job. + """ + + def __init__(self, config): + """ + Parameters + ---------- + config : str + File path to config json or csv (str). + """ + self._job_tags = None + self._base_dir, config = _load_batch_config(config) + self._pipeline_fp = Path(config["pipeline_config"]) + self._sets = _parse_config(config) + + logger.info("Batch job initialized with %d sub jobs.", len(self._sets)) + + @property + def job_table(self): + """pd.DataFrame: Batch job summary table.""" + jobs = [] + for job_tag, (arg_comb, file_set, set_tag) in self._sets.items(): + job_info = {k: str(v) for k, v in arg_comb.items()} + job_info["set_tag"] = str(set_tag) + job_info["files"] = str(file_set) + job_info = pd.DataFrame(job_info, index=[job_tag]) + jobs.append(job_info) + + table = pd.concat(jobs) + + table.index.name = "job" + table["pipeline_config"] = self._pipeline_fp.as_posix() + + return table + + @property + def sub_dirs(self): + """list: Job sub directory paths.""" + return [self._base_dir / tag for tag in self._sets] + + def _make_job_dirs(self): + """Copy job files from the batch config dir into sub job dirs.""" + + table = self.job_table + table.to_csv(self._base_dir / BATCH_CSV_FN) + logger.debug( + "Batch jobs list: %s", sorted(table.index.values.tolist()) + ) + logger.info("Preparing batch job directories...") + + # walk through current directory getting everything to copy + for source_dir, _, filenames in os.walk(self._base_dir): + # don't make additional copies of job sub directories. + if any(job_tag in source_dir for job_tag in self._sets): + continue + + # For each dir level, iterate through the batch arg combos + for tag, (arg_comb, mod_files, __) in self._sets.items(): + # Add the job tag to the directory path. + # This will copy config subdirs into the job subdirs + source_dir = Path(source_dir) + index = source_dir.parts.index(self._base_dir.name) + destination_dir = self._base_dir / tag + destination_dir = destination_dir.joinpath( + *source_dir.parts[index + 1 :] + ) + destination_dir.mkdir(parents=True, exist_ok=True) + + for name in filenames: + if BATCH_CSV_FN in name: + continue + _copy_batch_file( + source_dir / name, + destination_dir / name, + ) + + for fp_source in mod_files: + logger.debug( + "Copying and modifying run file %r to job: %r", + fp_source, + tag, + ) + fp_source = Path(fp_source) + fp_target = destination_dir / fp_source.name + _mod_file(fp_source, fp_target, arg_comb) + + _copy_batch_file( + self._pipeline_fp, + destination_dir / self._pipeline_fp.name, + ) + + logger.info("Batch job directories ready for execution.") + + def _run_pipelines(self, monitor_background=False): + """Run the pipeline modules for each batch job.""" + + for sub_directory in self.sub_dirs: + pipeline_config = sub_directory / self._pipeline_fp.name + if not pipeline_config.is_file(): + raise gapsConfigError( + f"Could not find pipeline config to run: " + f"{pipeline_config!r}" + ) + if monitor_background: + # pylint: disable=no-value-for-parameter + gaps.cli.pipeline.pipeline( + pipeline_config, + cancel=False, + monitor=True, + background=True, + ) + else: + Pipeline.run(pipeline_config, monitor=False) + +
+[docs] + def cancel(self): + """Cancel all pipeline modules for all batch jobs.""" + for sub_directory in self.sub_dirs: + pipeline_config = sub_directory / self._pipeline_fp.name + if pipeline_config.is_file(): + Pipeline.cancel_all(pipeline_config)
+ + +
+[docs] + def delete(self): + """Clear all of the batch sub job folders. + + Only the batch sub folders listed in the job summary csv file + in the batch config directory are deleted. + """ + + fp_job_table = self._base_dir / BATCH_CSV_FN + if not fp_job_table.exists(): + msg = ( + f"Cannot delete batch jobs without jobs summary table: " + f"{fp_job_table!r}" + ) + logger.error(msg) + raise FileNotFoundError(msg) + + job_table = pd.read_csv(fp_job_table, index_col=0) + + if job_table.index.name != "job": + msg = ( + "Cannot delete batch jobs when the batch summary table " + 'does not have "job" as the index key' + ) + raise gapsValueError(msg) + + self._remove_sub_dirs(job_table) + fp_job_table.unlink()
+ + + def _remove_sub_dirs(self, job_table): + """Remove all the sub-directories tracked in the job table.""" + for sub_dir in job_table.index: + job_dir = self._base_dir / sub_dir + if job_dir.exists(): + logger.info("Removing batch job directory: %r", sub_dir) + shutil.rmtree(job_dir) + else: + msg = "Cannot find batch job directory: {sub_dir!r}" + warn(msg, gapsWarning) + +
+[docs] + def run(self, dry_run=False, monitor_background=False): + """Run the batch job from a config file. + + Parameters + ---------- + dry_run : bool + Flag to make job directories without running. + monitor_background : bool + Flag to monitor all batch pipelines continuously + in the background. Note that the stdout/stderr will not be + captured, but you can set a pipeline "log_file" to capture + logs. + """ + self._make_job_dirs() + if dry_run: + return + + self._run_pipelines(monitor_background=monitor_background)
+
+ + + +def _load_batch_config(config_fp): + """Load and validate the batch config file (CSV or JSON)""" + base_dir = Path(config_fp).expanduser().parent.resolve() + config = _load_batch_config_to_dict(config_fp) + config = _validate_batch_config(config, base_dir) + return base_dir, config + + +def _load_batch_config_to_dict(config_fp): + """Load the batch file to dict.""" + if Path(config_fp).name.endswith(".csv"): + return _load_batch_csv(config_fp) + return load_config(config_fp, resolve_paths=False) + + +def _load_batch_csv(config_fp): + """Load batch csv file to dict.""" + table = pd.read_csv(config_fp) + table = table.where(pd.notnull(table), None) + _validate_batch_table(table) + return _convert_batch_table_to_dict(table) + + +def _validate_batch_table(table): + """Validate batch file CSV table.""" + if "set_tag" not in table or "files" not in table: + msg = 'Batch CSV config must have "set_tag" and "files" columns' + raise gapsConfigError(msg) + + set_tags_not_unique = len(table.set_tag.unique()) != len(table) + if set_tags_not_unique: + msg = 'Batch CSV config must have completely unique "set_tag" column' + raise gapsConfigError(msg) + + if "pipeline_config" not in table: + msg = ( + 'Batch CSV config must have "pipeline_config" columns specifying ' + "the pipeline config filename." + ) + raise gapsConfigError(msg) + + +def _convert_batch_table_to_dict(table): + """Convert validated batch csv file to dict.""" + sets = [] + for _, job in table.iterrows(): + job_dict = job.to_dict() + args = { + k: [v] + for k, v in job_dict.items() + if k not in ("set_tag", "files", "pipeline_config") + } + files = json.loads(job_dict["files"].replace("'", '"')) + set_config = { + "args": args, + "set_tag": job_dict["set_tag"], + "files": files, + } + sets.append(set_config) + + batch_dict = { + "logging": {"log_file": None, "log_level": "INFO"}, + "pipeline_config": table["pipeline_config"].values[0], + "sets": sets, + } + + return batch_dict + + +def _validate_batch_config(config, base_dir): + """Validate the batch config dict.""" + config = _check_pipeline(config, base_dir) + return _check_sets(config, base_dir) + + +def _check_pipeline(config, base_dir): + """Check the pipeline config file in the batch config.""" + + if "pipeline_config" not in config: + raise gapsConfigError('Batch config needs "pipeline_config" arg!') + + config["pipeline_config"] = resolve_all_paths( + config["pipeline_config"], base_dir + ) + if not Path(config["pipeline_config"]).exists(): + raise gapsConfigError( + f"Could not find the pipeline config file: " + f"{config['pipeline_config']!r}" + ) + return config + + +def _check_sets(config, base_dir): + """Check the batch sets for required inputs and valid files.""" + + if "sets" not in config: + raise gapsConfigError('Batch config needs "sets" arg!') + + batch_sets = [] + for batch_set in config["sets"]: + if not isinstance(batch_set, dict): + raise gapsConfigError("Batch sets must be dictionaries.") + if "args" not in batch_set: + raise gapsConfigError('All batch sets must have "args" key.') + if "files" not in batch_set: + raise gapsConfigError('All batch sets must have "files" key.') + batch_set["files"] = resolve_all_paths(batch_set["files"], base_dir) + for fpath in batch_set["files"]: + if not Path(fpath).exists(): + raise gapsConfigError( + "Could not find file to modify in batch jobs: {fpath!r}" + ) + batch_sets.append(batch_set) + + config["sets"] = batch_sets + return config + + +def _enumerated_product(args): + """An enumerated product function.""" + yield 
from zip(product(*(range(len(x)) for x in args)), product(*args)) + + +def _parse_config(config): + """Parse batch config object for useful data.""" + + sets = set() + batch_sets = {} + + for batch_set in config["sets"]: + set_tag = batch_set.get("set_tag", "") + args = batch_set["args"] + if set_tag in sets: + msg = f"Found multiple sets with the same set_tag: {set_tag!r}" + raise gapsValueError(msg) + sets.add(set_tag) + + for inds, comb in _enumerated_product(args.values()): + arg_combo = dict(zip(args, comb)) + arg_inds = dict(zip(args, inds)) + tag_arg_comb = { + k: v for k, v in arg_combo.items() if len(args[k]) > 1 + } + job_tag = _make_job_tag(set_tag, tag_arg_comb, arg_inds) + batch_sets[job_tag] = BatchSet( + arg_combo, batch_set["files"], set_tag + ) + + return batch_sets + + +def _make_job_tag(set_tag, arg_comb, arg_inds): + """Make a job tags from a unique combination of args + values.""" + + job_tag = [set_tag] if set_tag else [] + + for arg, value in arg_comb.items(): + arg_tag = "".join( + [s[0] for s in arg.split("_")] + + ( + [_format_value(value)] + if isinstance(value, (int, float)) + else [str(arg_inds[arg])] + ) + ) + + job_tag.append(arg_tag) + + return "_".join(job_tag).rstrip("_") + + +def _format_value(value): + """Format the input value as a string.""" + + value = str(value).replace(".", "") + + if parse_year(f"_{value}", option="bool"): + value = f"{value}0" + + return value + + +def _mod_file(fpath_in, fpath_out, arg_mods): + """Import and modify the contents of a json. Dump to new file.""" + config_type = ConfigType(fpath_in.name.split(".")[-1]) + config = config_type.load(fpath_in) + config_type.write(fpath_out, _mod_dict(config, arg_mods)) + + +def _mod_dict(inp, arg_mods): + """Recursively modify key/value pairs in a dictionary.""" + + out = copy.deepcopy(inp) + + if isinstance(inp, dict): + for key, val in inp.items(): + if key in arg_mods: + out[key] = _clean_arg(arg_mods[key]) + elif isinstance(val, (list, dict)): + out[key] = _mod_dict(val, arg_mods) + + elif isinstance(inp, list): + return [_mod_dict(entry, arg_mods) for entry in inp] + + return out + + +def _clean_arg(arg): + """Perform any cleaning steps required before writing an arg to a json.""" + + if not isinstance(arg, str): + return arg + + missing_bracket_pair = "{" not in arg or "}" not in arg + missing_string_quotes = "'" not in arg and '"' not in arg + if missing_bracket_pair or missing_string_quotes: + return arg + + arg = arg.replace("'", '"') + try: + return json.loads(arg) + except json.decoder.JSONDecodeError as err: + logger.exception("Could not load json string: %s", arg) + raise err + + +def _copy_batch_file(fp_source, fp_target): + """Copy a file in the batch directory into a job directory if needed.""" + if not _source_needs_copying(fp_source, fp_target): + return + + logger.debug("Copying run file %r to %r", fp_source, fp_target) + shutil.copy(fp_source, fp_target) + + +def _source_needs_copying(fp_source, fp_target): + """Determine if source needs to be copied to dest.""" + if not fp_target.exists(): + return True + return fp_source.lstat().st_mtime > fp_target.lstat().st_mtime +
+ +
+ + + +
+ +
+ +
+
+
+ +
+ +
+ +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/cli/batch.html b/_modules/gaps/cli/batch.html new file mode 100644 index 00000000..a0e10bcd --- /dev/null +++ b/_modules/gaps/cli/batch.html @@ -0,0 +1,475 @@ + + + + + + + + + + gaps.cli.batch — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +

Source code for gaps.cli.batch

+# -*- coding: utf-8 -*-
+"""
+Batch Job CLI entry points.
+"""
+import click
+
+import gaps.batch
+from gaps.config import init_logging_from_config_file
+from gaps.cli.command import _WrappedCommand
+from gaps.cli.documentation import _batch_command_help
+
+
+def _batch(config_file, dry, cancel, delete, monitor_background):
+    """Execute an analysis pipeline over a parametric set of inputs"""
+    init_logging_from_config_file(config_file, background=monitor_background)
+
+    if cancel:
+        gaps.batch.BatchJob(config_file).cancel()
+    elif delete:
+        gaps.batch.BatchJob(config_file).delete()
+    else:
+        gaps.batch.BatchJob(config_file).run(
+            dry_run=dry,
+            monitor_background=monitor_background,
+        )
+
+
+
+[docs] +def batch_command(): + """Generate a batch command.""" + params = [ + click.Option( + param_decls=["--config_file", "-c"], + required=True, + type=click.Path(exists=True), + help=_batch_command_help(), + ), + click.Option( + param_decls=["--dry"], + is_flag=True, + help="Flag to do a dry run (make batch dirs and update files " + "without running the pipeline).", + ), + click.Option( + param_decls=["--cancel"], + is_flag=True, + help="Flag to cancel all jobs associated associated with the " + "``batch_jobs.csv`` file in the current batch config directory.", + ), + click.Option( + param_decls=["--delete"], + is_flag=True, + help="Flag to delete all batch job sub directories associated " + "with the ``batch_jobs.csv`` file in the current batch config " + "directory.", + ), + click.Option( + param_decls=["--monitor-background"], + is_flag=True, + help="Flag to monitor all batch pipelines continuously in the " + "background. Note that the ``stdout/stderr`` will not be " + 'captured, but you can set a pipeline ``"log_file"`` to capture ' + "logs.", + ), + ] + return _WrappedCommand( + "batch", + context_settings=None, + callback=_batch, + params=params, + help=( + "Execute an analysis pipeline over a parametric set of inputs.\n\n" + "The general structure for calling this CLI command is given " + "below (add``--help`` to print help info to the terminal)." + ), + epilog=None, + short_help=None, + options_metavar="[OPTIONS]", + add_help_option=True, + no_args_is_help=True, + hidden=False, + deprecated=False, + )
+ +
+ +
+ + + +
+ +
+ +
+
+
+ +
+ +
+ +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/cli/cli.html b/_modules/gaps/cli/cli.html new file mode 100644 index 00000000..2476117f --- /dev/null +++ b/_modules/gaps/cli/cli.html @@ -0,0 +1,615 @@ + + + + + + + + + + gaps.cli.cli — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+ + + +
+
+ +
+ + + + + + + + + + + +
+ +
+ + +
+
+ +
+
+ +
+ +
+ + + + +
+ +
+ + +
+
+ + + + + +
+ +

Source code for gaps.cli.cli

+# -*- coding: utf-8 -*-
+"""
+Generation CLI entry points.
+"""
+from functools import partial
+
+import click
+
+from gaps import Pipeline
+from gaps.cli.batch import batch_command
+from gaps.cli.templates import template_command
+from gaps.cli.pipeline import pipeline_command, template_pipeline_config
+from gaps.cli.collect import collect
+from gaps.cli.config import from_config
+from gaps.cli.command import (
+    CLICommandFromFunction,
+    _WrappedCommand,
+)
+from gaps.cli.documentation import _main_command_help
+from gaps.cli.preprocessing import preprocess_collect_config
+from gaps.cli.status import status_command
+
+
+class _CLICommandGenerator:
+    """Generate commands from a list of configurations."""
+
+    def __init__(self, command_configs):
+        """
+
+        Parameters
+        ----------
+        command_configs : list of :class:`CLICommandFromFunction`
+            List of command configs to convert to click commands.
+        """
+        self.command_configs = command_configs
+        self.commands, self.template_configs = [], {}
+
+    def add_collect_command_configs(self):
+        """Add collect command if the function is split spatially."""
+        all_commands = []
+        for command_configuration in self.command_configs:
+            all_commands.append(command_configuration)
+            if command_configuration.add_collect:
+                collect_configuration = CLICommandFromFunction(
+                    name=f"collect-{command_configuration.name}",
+                    function=collect,
+                    split_keys=[("_out_path", "_pattern")],
+                    config_preprocessor=preprocess_collect_config,
+                )
+                all_commands.append(collect_configuration)
+        self.command_configs = all_commands
+        return self
+
+    def convert_to_commands(self):
+        """Convert all of the command configs into click commands."""
+        for command_config in self.command_configs:
+            command = as_click_command(command_config)
+            self.commands.append(command)
+            Pipeline.COMMANDS[command_config.name] = command
+            template_config = command_config.documentation.template_config
+            self.template_configs[command_config.name] = template_config
+        return self
+
+    def add_pipeline_command(self):
+        """Add pipeline command, which includes the previous commands."""
+        tpc = template_pipeline_config(self.command_configs)
+        pipeline = pipeline_command(tpc)
+        self.commands = [pipeline] + self.commands
+        self.template_configs["pipeline"] = tpc
+        return self
+
+    def add_batch_command(self):
+        """Add the batch command."""
+        self.commands.append(batch_command())
+        return self
+
+    def add_status_command(self):
+        """Add the status command."""
+        self.commands.append(status_command())
+        return self
+
+    def add_template_command(self):
+        """Add the config template command."""
+        self.commands.append(template_command(self.template_configs))
+        return self
+
+    def generate(self):
+        """Generate a list of click commands from input configurations."""
+        return (
+            self.add_collect_command_configs()
+            .convert_to_commands()
+            .add_pipeline_command()
+            .add_batch_command()
+            .add_status_command()
+            .add_template_command()
+            .commands
+        )
+
+
+
+[docs] +def as_click_command(command_config): + """Convert a GAPs command configuration object into a ``click`` command. + + Parameters + ---------- + command_config : command configuration object + Instance of a class that inherits from + :class:`~gaps.cli.command.AbstractBaseCLICommandConfiguration` + (i.e. :class:`gaps.cli.command.CLICommandFromClass`, + :class:`gaps.cli.command.CLICommandFromFunction`, etc.). + + Returns + ------- + click.command + A ``click`` command generated from the command configuration. + """ + doc = command_config.documentation + name = command_config.name + params = [ + click.Option( + param_decls=["--config_file", "-c"], + required=True, + type=click.Path(exists=True), + help=doc.config_help(name), + ) + ] + + return _WrappedCommand( + name, + context_settings=None, + callback=partial( + from_config, + command_config=command_config, + ), + params=params, + help=doc.command_help(name), + epilog=None, + short_help=None, + options_metavar="[OPTIONS]", + add_help_option=True, + no_args_is_help=True, + hidden=False, + deprecated=False, + )
+ + + +
+[docs] +def make_cli(commands, info=None): + """Create a pipeline CLI to split execution across HPC nodes. + + This function generates a CLI for a package based on the input + command configurations. Each command configuration is based around + a function that is to be executed on one or more nodes. + + Parameters + ---------- + commands : list of command configurations + List of command configs to convert to click commands. See the + :class:`~gaps.cli.command.CLICommandFromClass` or + :class:`~gaps.cli.command.CLICommandFromFunction` + documentation for a description of the input options. Each + command configuration is converted into a subcommand. Any + command configuration with ``project_points`` in the + `split_keys` argument will get a corresponding + ``collect-{command name}`` command that collects the outputs of + the spatially-distributed command. + info : dict, optional + A dictionary that contains optional info about the calling + program to include in the CLI. Allowed keys include: + + name : str + Name of program to display at the top of the CLI help. + This input is optional, but specifying it yields richer + documentation for the main CLI command. + version : str + Include program version with a ``--version`` CLI option. + + By default, ``None``. + + Returns + ------- + click.Group + A group containing the requested subcommands. This can be used + as the entrypoint to the package CLI. + """ + info = info or {} + command_generator = _CLICommandGenerator(commands) + commands = command_generator.generate() + + options = [ + click.Option( + param_decls=["-v", "--verbose"], + is_flag=True, + help="Flag to turn on debug logging. Default is not verbose.", + ), + ] + + prog_name = info.get("name") + if prog_name: + main_help = _main_command_help( + prog_name, command_generator.command_configs + ) + else: + main_help = "Command Line Interface" + + main = click.Group( + help=main_help, params=options, callback=_main_cb, commands=commands + ) + version = info.get("version") + if version is not None: + main = click.version_option(version=version)(main) + + return main
+ + + +@click.pass_context +def _main_cb(ctx, verbose): + """Set the obj and verbose settings of the commands.""" + ctx.ensure_object(dict) + ctx.obj["VERBOSE"] = verbose + ctx.max_content_width = 92 +
+ +
+ + + +
+ +
+ +
+
+
+ +
+ +
+ +
+ + + + +
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/cli/collect.html b/_modules/gaps/cli/collect.html new file mode 100644 index 00000000..cfd46114 --- /dev/null +++ b/_modules/gaps/cli/collect.html @@ -0,0 +1,490 @@ + + + + + + + + + + gaps.cli.collect — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Source code for gaps.cli.collect

+# -*- coding: utf-8 -*-
+"""
+GAPs collection CLI entry points.
+"""
+import glob
+import logging
+from warnings import warn
+
+from rex import Resource
+from gaps import Collector
+from gaps.warnings import gapsWarning
+
+logger = logging.getLogger(__name__)
+
+
+
+
[docs]
+def collect(
+    _out_path, _pattern, project_points=None, datasets=None, purge_chunks=False
+):
+    """Run collection on local worker.
+
+    Collect data generated across multiple nodes into a single HDF5
+    file.
+
+    Parameters
+    ----------
+    project_points : str | list, optional
+        This input should represent the project points that correspond
+        to the *full collection* of points contained in the input HDF5
+        files to be collected. You may simply point to a ProjectPoints
+        csv file that contains the GID's that should be collected. You
+        may also input the GID's as a list, though this may not be
+        suitable for collections with a large number of points. You may
+        also set this input to ``None`` to generate a list of GID's
+        automatically from the input files. By default, `None`.
+    datasets : list of str, optional
+        List of dataset names to collect into the output file. If
+        collection is performed into multiple files (i.e. multiple input
+        patterns), this list can contain all relevant datasets across
+        all files (a warning will be thrown, but it is safe to ignore
+        it). If ``None``, all datasets from the input files are
+        collected. By default, ``None``.
+    purge_chunks : bool, optional
+        Option to delete single-node input HDF5 files. Note that the
+        input files will **not** be removed if any of the datasets they
+        contain have not been collected, regardless of the value of this
+        input. By default, ``False``.
+
+    Returns
+    -------
+    str
+        Path to HDF5 file with the collected outputs.
+    """
+    if "*" not in _pattern:
+        logger.info("Collect pattern has no wildcard! No collection performed")
+        return str(_out_path)
+
+    logger.info(
+        "Collection is being run with collection pattern: %s. Target output "
+        "path is: %s",
+        _pattern,
+        _out_path,
+    )
+
+    datasets = _find_datasets(datasets, _pattern)
+    collector = Collector(_out_path, _pattern, project_points)
+    for dataset_name in datasets:
+        logger.debug("Collecting %r...", dataset_name)
+        collector.collect(dataset_name)
+
+    if purge_chunks:
+        collector.purge_chunks()
+    else:
+        collector.move_chunks()
+
+    return str(_out_path)
+ + + +def _find_datasets(datasets, pattern): + """Find datasets from a sample file.""" + + with Resource(glob.glob(pattern)[0]) as res: + if datasets is None: + return [ + d + for d in res + if not d.startswith("time_index") and d != "meta" + ] + missing = {d for d in datasets if d not in res} + + if any(missing): + msg = ( + f"Could not find the following datasets in the output files: " + f"{missing}. Skipping..." + ) + warn(msg, gapsWarning) + + return list(set(datasets) - missing) +
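For reference, here is a hedged sketch of calling :func:`collect` directly on a login node or local machine. All file paths and dataset names below are hypothetical, and the leading-underscore arguments are normally filled in by :func:`gaps.cli.preprocessing.preprocess_collect_config` rather than by hand::

    from gaps.cli.collect import collect

    collect(
        "./collected_output.h5",        # _out_path: combined output file
        "./outputs/model_output_*.h5",  # _pattern: glob of per-node files
        project_points="./project_points.csv",
        datasets=["cf_mean"],
        purge_chunks=False,
    )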
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/cli/command.html b/_modules/gaps/cli/command.html new file mode 100644 index 00000000..429588a1 --- /dev/null +++ b/_modules/gaps/cli/command.html @@ -0,0 +1,1127 @@ + + + + + + + + + + gaps.cli.command — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Source code for gaps.cli.command

+# -*- coding: utf-8 -*-
+# pylint: disable=too-few-public-methods
+"""
+GAPs command configuration preprocessing functions.
+"""
+from abc import ABC, abstractmethod
+from functools import cached_property, wraps
+from inspect import signature
+
+import click
+
+from gaps.cli.config import GAPS_SUPPLIED_ARGS
+from gaps.cli.documentation import CommandDocumentation
+from gaps.cli.preprocessing import split_project_points_into_ranges
+from gaps.utilities import _is_sphinx_build
+
+
+
+
[docs]
+class AbstractBaseCLICommandConfiguration(ABC):
+    """Abstract Base CLI Command representation.
+
+    This base implementation provides helper methods to determine
+    whether a given command is split spatially.
+
+    Note that ``runner`` is a required part of the interface but is not
+    listed as an abstract property to avoid unnecessary function
+    wrapping.
+    """
+
+    def __init__(
+        self,
+        name,
+        add_collect=False,
+        split_keys=None,
+        config_preprocessor=None,
+        skip_doc_params=None,
+    ):
+        self.name = name
+        self.add_collect = add_collect
+        self.split_keys = set() if split_keys is None else set(split_keys)
+        self.config_preprocessor = config_preprocessor or _passthrough
+        self.skip_doc_params = (
+            set() if skip_doc_params is None else set(skip_doc_params)
+        )
+        preprocessor_sig = signature(self.config_preprocessor)
+        self.preprocessor_args = preprocessor_sig.parameters.keys()
+        self.preprocessor_defaults = {
+            name: param.default
+            for name, param in preprocessor_sig.parameters.items()
+            if param.default != param.empty
+        }
+        if self.is_split_spatially:
+            self._add_split_on_points()
+
+    def _add_split_on_points(self):
+        """Add split points preprocessing."""
+        self.config_preprocessor = _split_points(self.config_preprocessor)
+        self.split_keys -= {"project_points"}
+        self.split_keys |= {"project_points_split_range"}
+
+    @property
+    def is_split_spatially(self):
+        """bool: ``True`` if execution is split spatially across nodes."""
+        return any(
+            key in self.split_keys
+            for key in ["project_points", "project_points_split_range"]
+        )
+
+    @property
+    @abstractmethod
+    def documentation(self):
+        """CommandDocumentation: Documentation object."""
+        raise NotImplementedError
+ + + +
+[docs] +class CLICommandFromFunction(AbstractBaseCLICommandConfiguration): + """Configure a CLI command to execute a function on multiple nodes. + + This class configures a CLI command that runs a given function + across multiple nodes on an HPC. The primary utility is to split the + function execution spatially, meaning that individual nodes will run + the function on a subset of the input points. However, this + configuration also supports splitting the execution on other inputs + (in lieu of or in addition to the geospatial partitioning). + """ + + def __init__( + self, + function, + name=None, + add_collect=False, + split_keys=None, + config_preprocessor=None, + skip_doc_params=None, + ): + """ + + Parameters + ---------- + function : callable + The function to run on individual nodes. This function will + be used to generate all the documentation and template + configuration files, so it should be thoroughly documented + (using a `NumPy Style Python Docstring + <https://numpydoc.readthedocs.io/en/latest/format.html>`_). + In particular, the "Extended Summary" and the "Parameters" + section will be pulled form the docstring. + + .. WARNING:: The "Extended Summary" section may not show up + properly if the short "Summary" section is + missing. + + This function must return the path to the output file it + generates (or a list of paths if multiple output files are + generated). If no output files are generated, the function + must return ``None`` or an empty list. In order to avoid + clashing output file names with jobs on other nodes, make + sure to "request" the ``tag`` argument. This function can + "request" the following arguments by including them in the + function signature (``gaps`` will automatically pass them to + the function without any additional used input): + + tag : str + Short string unique to this job run that can be used + to generate unique output filenames, thereby + avoiding clashing output files with jobs on other + nodes. This string contains a leading underscore, + so the file name can easily be generated: + ``f"{out_file_name}{tag}.{extension}"``. + command_name : str + Name of the command being run. This is equivalent to + the ``name`` input argument. + config_file : str + Path to the configuration file specified by the + user. + project_dir : str + Path to the project directory (parent directory of + the configuration file). + job_name : str + Name of the job being run. This is typically a + combination of the project directory, the command + name, and a tag unique to a job. Note that the tag + will not be included if you request this argument + in a config preprocessing function, as the execution + has not been split into multiple jobs by that point. + out_dir : str + Path to output directory - typically equivalent to + the project directory. + out_fpath : str + Suggested path to output file. You are not required + to use this argument - it is provided purely for + convenience purposes. This argument combines the + ``out_dir`` with ``job_name`` to yield a unique + output filepath for a given node. Note that the + output filename will contain the tag. Also note that + this string *WILL NOT* contain a file-ending, so + that will have to be added by the node function. + + If your function is capable of multiprocessing, you should + also include ``max_workers`` in the function signature. + ``gaps`` will pass an integer equal to the number of + processes the user wants to run on a single node for this + value. + + .. 
WARNING:: The keywords ``{"max-workers", + "sites_per_worker", "memory_utilization_limit", + "timeout", "pool_size"}`` are assumed to + describe execution control. If you request any + of these as function arguments, users of your + CLI will specify them in the + `execution_control` block of the input config + file. + + Note that the ``config`` parameter is not allowed as + a function signature item. Please request all the required + keys/inputs directly instead. This function can also request + "private" arguments by including a leading underscore in the + argument name. These arguments are NOT exposed to users in + the documentation or template configuration files. Instead, + it is expected that the ``config_preprocessor`` function + fills these arguments in programmatically before the + function is distributed across nodes. See the implementation + of :func:`gaps.cli.collect.collect` and + :func:`gaps.cli.preprocessing.preprocess_collect_config` + for an example of this pattern. You can use the + ``skip_doc_params`` input below to achieve the same results + without the underscore syntax (helpful for public-facing + functions). + name : str, optional + Name of the command. This will be the name used to call the + command on the terminal. This name does not have to match + the function name. It is encouraged to use lowercase names + with dashes ("-") instead of underscores ("_") to stay + consistent with click's naming conventions. By default, + ``None``, which uses the function name as the command name + (with minor formatting to conform to ``click``-style + commands). + add_collect : bool, optional + Option to add a "collect-{command_name}" command immediately + following this command to collect the (multiple) output + files generated across nodes into a single file. The collect + command will only work if the output files the previous + command generates are HDF5 files with ``meta`` and + ``time_index`` datasets (standard ``rex`` HDF5 file + structure). If you set this option to ``True``, your run + function *must* return the path (as a string) to the output + file it generates in order for users to be able to use + ``"PIPELINE"`` as the input to the ``collect_pattern`` key + in the collection config. The path returned by this function + must also include the ``tag`` in the output file name in + order for collection to function properly (an easy way to do + this is to request ``tag`` in the function signature and + name the output file generated by the function using the + ``f"{out_file_name}{tag}.{extension}"`` format). + By default, ``False``. + split_keys : set | container, optional + A set of strings identifying the names of the config keys + that ``gaps`` should split the function execution on. To + specify geospatial partitioning in particular, ensure that + the main ``function`` has a "project_points" argument (which + accepts a :class:`gaps.project_points.ProjectPoints` + instance) and specify "project_points" as a split argument. + Users of the CLI will only need to specify the path to the + project points file and a "nodes" argument in the execution + control. To split execution on additional/other inputs, + include them by name in this input (and ensure the run + function accepts them as input). You may include tuples of + strings in this iterable as well. Tuples of strings will be + interpreted as combinations of keys whose values should be + iterated over simultaneously. 
For example, specifying + ``split_keys=[("a", "b")]`` and invoking with a config file + where ``a = [1, 2]`` and ``b = [3, 4]`` will run the main + function two times (on two nodes), first with the inputs + ``a=1, b=3`` and then with the inputs ``a=2, b=4``. + It is the responsibility of the developer using this class + to ensure that the user input for all ``split_keys`` is an + iterable (typically a list), and that the lengths of all + "paired" keys match. To allow non-iterable user input for + split keys, use the ``config_preprocessor`` argument to + specify a preprocessing function that converts the user + input into a list of the expected inputs. If users specify + an empty list or ``None`` for a key in ``split_keys``, then + GAPs will pass ``None`` as the value for that key (i.e. if + ``split_keys=["a"]`` and users specify ``"a": []`` in their + config, then the ``function`` will be called with + ``a=None``). If ``None``, execution is not split across + nodes, and a single node is always used for the function + call. By default, ``None``. + config_preprocessor : callable, optional + Optional function for configuration pre-processing. The + preprocessing step occurs before jobs are split across HPC + nodes, and can therefore be used to calculate the + ``split_keys`` input and/or validate that it conforms to the + requirements layed out above. At minimum, this function + should have "config" as the first parameter (which will + receive the user configuration input as a dictionary) and + *must* return the updated config dictionary. This function + can also "request" the following arguments by including them + in the function signature: + + command_name : str + Name of the command being run. This is equivalent to + the ``name`` input above. + config_file : Path + Path to the configuration file specified by the + user. + project_dir : Path + Path to the project directory (parent directory of + the configuration file). + job_name : str + Name of the job being run. This is typically a + combination of the project directory and the command + name. + out_dir : Path + Path to output directory - typically equivalent to + the project directory. + out_fpath : Path + Suggested path to output file. You are not required + to use this argument - it is provided purely for + convenience purposes. This argument combines the + ``out_dir`` with ``job_name`` to yield a unique + output filepath for a given node. Note that the + output filename *WILL NOT* contain the tag, since + the number of split nodes have not been determined + when the config pre-processing function is called. + Also note that this string *WILL NOT* contain a + file-ending, so that will have to be added by the + node function. + log_directory : Path + Path to log output directory (defaults to + project_dir / "logs"). + verbose : bool + Flag indicating wether the user has selected a DEBUG + verbosity level for logs. + + These inputs will be provided by GAPs and *will not* be + displayed to users in the template configuration files or + documentation. See + :func:`gaps.cli.preprocessing.preprocess_collect_config` + for an example. Note that the ``tag`` parameter is not + allowed as a pre-processing function signature item (the + node jobs will not have been configured before this function + executes). This function can also "request" new user inputs + that are not present in the signature of the main run + function. In this case, the documentation for these new + arguments is pulled from the ``config_preprocessor`` + function. 
This feature can be used to request auxillary + information from the user to fill in "private" inputs to the + main run function. See the implementation of + :func:`gaps.cli.preprocessing.preprocess_collect_config` and + :func:`gaps.cli.collect.collect` for an example of this + pattern. Do not request parameters with the same names as + any of your model function (i.e. if ``res_file`` is a model + parameter, do not request it in the preprocessing function + docstring - extract it from the config dictionary instead). + By default, ``None``. + skip_doc_params : iterable of str, optional + Optional iterable of parameter names that should be excluded + from the documentation/template configuration files. This + can be useful if your pre-processing function automatically + sets some parameters based on other user input. This option + is an alternative to the "private" arguments discussed in + the ``function`` parameter documentation above. By default, + ``None``. + """ + super().__init__( + name or function.__name__.strip("_").replace("_", "-"), + add_collect, + split_keys, + config_preprocessor, + skip_doc_params, + ) + self.runner = function + + @cached_property + def documentation(self): + """CommandDocumentation: Documentation object.""" + return CommandDocumentation( + self.runner, + self.config_preprocessor, + skip_params=GAPS_SUPPLIED_ARGS | self.skip_doc_params, + is_split_spatially=self.is_split_spatially, + )
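As a hedged illustration of the ``split_keys``/``config_preprocessor`` pattern described above (the ``run_scenario`` function, the ``"scenario"`` key, and the file layout are hypothetical placeholders, not part of ``gaps``)::

    from gaps.cli.command import CLICommandFromFunction

    def preprocess_config(config):
        """Allow users to supply "scenario" as a single string or a list."""
        scenario = config.get("scenario") or []
        if isinstance(scenario, str):
            config["scenario"] = [scenario]
        return config

    def run_scenario(project_points, scenario, tag, out_dir):
        """Hypothetical model run for one scenario on one set of points."""
        out_fpath = f"{out_dir}/{scenario}{tag}.h5"
        # ... run the model and write an HDF5 file here ...
        return out_fpath

    command = CLICommandFromFunction(
        run_scenario,
        name="run-scenario",
        add_collect=True,
        split_keys={"project_points", "scenario"},
        config_preprocessor=preprocess_config,
    )

Each node then receives one ``scenario`` value and one subset of ``project_points``, and the per-node outputs can later be combined with the generated ``collect-run-scenario`` command.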
+ + + +# pylint: disable=invalid-name,import-outside-toplevel +
+[docs] +def CLICommandConfiguration( + name, function, split_keys=None, config_preprocessor=None +): # pragma: no cover + """This class is deprecated. + + Please use :class:`CLICommandFromFunction` + """ + from warnings import warn + from gaps.warnings import gapsDeprecationWarning + + warn( + "The `CLICommandConfiguration` class is deprecated! Please use " + "`CLICommandFromFunction` instead.", + gapsDeprecationWarning, + ) + return CLICommandFromFunction( + function, + name=name, + add_collect=any( + key in split_keys + for key in ["project_points", "project_points_split_range"] + ), + split_keys=split_keys, + config_preprocessor=config_preprocessor, + )
+ + + +
+[docs] +class CLICommandFromClass(AbstractBaseCLICommandConfiguration): + """Configure a CLI command to execute an object method on multiple nodes. + + This class configures a CLI command that initializes runs a given + object and runs a particular method of that object across multiple + nodes on an HPC. The primary utility is to split the method + execution spatially, meaning that individual nodes will run the + method on a subset of the input points. However, this configuration + also supports splitting the execution on other inputs + (in lieu of or in addition to the geospatial partitioning). + """ + + def __init__( + self, + init, + method, + name=None, + add_collect=False, + split_keys=None, + config_preprocessor=None, + skip_doc_params=None, + ): + """ + + Parameters + ---------- + init : class + The class to be initialized and used to to run on individual + nodes. The class must implement ``method``. The initializer, + along with the corresponding method, will be used to + generate all the documentation and template configuration + files, so it should be thoroughly documented + (using a `NumPy Style Python Docstring + <https://numpydoc.readthedocs.io/en/latest/format.html>`_). + In particular, the "Extended Summary" (from the ``__init__`` + method *only*) and the "Parameters" section (from *both* the + ``__init__`` method and the run `method` given below) will + be pulled form the docstring. + + .. WARNING:: The "Extended Summary" section may not display + properly if the short "Summary" section in the + ``__init__`` method is missing. + + method : str + The name of a method of the ``init`` class to act as the + model function to run across multiple nodes on the HPC. This + method must return the path to the output file it generates + (or a list of paths if multiple output files are generated). + If no output files are generated, the method must return + ``None`` or an empty list. In order to avoid clashing output + file names with jobs on other nodes, make sure to "request" + the ``tag`` argument. This method can "request" the + following arguments by including them in the method + signature (``gaps`` will automatically pass them to the + method without any additional used input): + + tag : str + Short string unique to this job run that can be used + to generate unique output filenames, thereby + avoiding clashing output files with jobs on other + nodes. This string contains a leading underscore, + so the file name can easily be generated: + ``f"{out_file_name}{tag}.{extension}"``. + command_name : str + Name of the command being run. This is equivalent to + the ``name`` input argument. + config_file : str + Path to the configuration file specified by the + user. + project_dir : str + Path to the project directory (parent directory of + the configuration file). + job_name : str + Name of the job being run. This is typically a + combination of the project directory, the command + name, and a tag unique to a job. Note that the tag + will not be included if you request this argument + in a config preprocessing function, as the execution + has not been split into multiple jobs by that point. + out_dir : str + Path to output directory - typically equivalent to + the project directory. + out_fpath : str + Suggested path to output file. You are not required + to use this argument - it is provided purely for + convenience purposes. This argument combines the + ``out_dir`` with ``job_name`` to yield a unique + output filepath for a given node. 
Note that the + output filename will contain the tag. Also note that + this string *WILL NOT* contain a file-ending, so + that will have to be added by the node function. + + If your function is capable of multiprocessing, you should + also include ``max_workers`` in the function signature. + ``gaps`` will pass an integer equal to the number of + processes the user wants to run on a single node for this + value. + + .. WARNING:: The keywords ``{"max-workers", + "sites_per_worker", "memory_utilization_limit", + "timeout", "pool_size"}`` are assumed to + describe execution control. If you request any + of these as function arguments, users of your + CLI will specify them in the + `execution_control` block of the input config + file. + + Note that the ``config`` parameter is not allowed as + a function signature item. Please request all the required + keys/inputs directly instead. This function can also request + "private" arguments by including a leading underscore in the + argument name. These arguments are NOT exposed to users in + the documentation or template configuration files. Instead, + it is expected that the ``config_preprocessor`` function + fills these arguments in programmatically before the + function is distributed across nodes. See the implementation + of :func:`gaps.cli.collect.collect` and + :func:`gaps.cli.preprocessing.preprocess_collect_config` + for an example of this pattern. You can use the + ``skip_doc_params`` input below to achieve the same results + without the underscore syntax (helpful for public-facing + functions). + name : str, optional + Name of the command. This will be the name used to call the + command on the terminal. This name does not have to match + the function name. It is encouraged to use lowercase names + with dashes ("-") instead of underscores ("_") to stay + consistent with click's naming conventions. By default, + ``None``, which uses the ``method`` name (with minor + formatting to conform to ``click``-style commands). + add_collect : bool, optional + Option to add a "collect-{command_name}" command immediately + following this command to collect the (multiple) output + files generated across nodes into a single file. The collect + command will only work if the output files the previous + command generates are HDF5 files with ``meta`` and + ``time_index`` datasets (standard ``rex`` HDF5 file + structure). If you set this option to ``True``, your run + function *must* return the path (as a string) to the output + file it generates in order for users to be able to use + ``"PIPELINE"`` as the input to the ``collect_pattern`` key + in the collection config. The path returned by this function + must also include the ``tag`` in the output file name in + order for collection to function properly (an easy way to do + this is to request ``tag`` in the function signature and + name the output file generated by the function using the + ``f"{out_file_name}{tag}.{extension}"`` format). + By default, ``False``. + split_keys : set | container, optional + A set of strings identifying the names of the config keys + that ``gaps`` should split the function execution on. To + specify geospatial partitioning in particular, ensure that + the main ``function`` has a "project_points" argument (which + accepts a :class:`gaps.project_points.ProjectPoints` + instance) and specify "project_points" as a split argument. + Users of the CLI will only need to specify the path to the + project points file and a "nodes" argument in the execution + control. 
To split execution on additional/other inputs, + include them by name in this input (and ensure the run + function accepts them as input). You may include tuples of + strings in this iterable as well. Tuples of strings will be + interpreted as combinations of keys whose values should be + iterated over simultaneously. For example, specifying + ``split_keys=[("a", "b")]`` and invoking with a config file + where ``a = [1, 2]`` and ``b = [3, 4]`` will run the main + function two times (on two nodes), first with the inputs + ``a=1, b=3`` and then with the inputs ``a=2, b=4``. + It is the responsibility of the developer using this class + to ensure that the user input for all ``split_keys`` is an + iterable (typically a list), and that the lengths of all + "paired" keys match. To allow non-iterable user input for + split keys, use the ``config_preprocessor`` argument to + specify a preprocessing function that converts the user + input into a list of the expected inputs. If users specify + an empty list or ``None`` for a key in ``split_keys``, then + GAPs will pass ``None`` as the value for that key (i.e. if + ``split_keys=["a"]`` and users specify ``"a": []`` in their + config, then the ``function`` will be called with + ``a=None``). If ``None``, execution is not split across + nodes, and a single node is always used for the function + call. By default, ``None``. + config_preprocessor : callable, optional + Optional function for configuration pre-processing. The + preprocessing step occurs before jobs are split across HPC + nodes, and can therefore be used to calculate the + ``split_keys`` input and/or validate that it conforms to the + requirements layed out above. At minimum, this function + should have "config" as the first parameter (which will + receive the user configuration input as a dictionary) and + *must* return the updated config dictionary. This function + can also "request" the following arguments by including them + in the function signature: + + command_name : str + Name of the command being run. This is equivalent to + the ``name`` input above. + config_file : Path + Path to the configuration file specified by the + user. + project_dir : Path + Path to the project directory (parent directory of + the configuration file). + job_name : str + Name of the job being run. This is typically a + combination of the project directory and the command + name. + out_dir : Path + Path to output directory - typically equivalent to + the project directory. + out_fpath : Path + Suggested path to output file. You are not required + to use this argument - it is provided purely for + convenience purposes. This argument combines the + ``out_dir`` with ``job_name`` to yield a unique + output filepath for a given node. Note that the + output filename *WILL NOT* contain the tag, since + the number of split nodes have not been determined + when the config pre-processing function is called. + Also note that this string *WILL NOT* contain a + file-ending, so that will have to be added by the + node function. + log_directory : Path + Path to log output directory (defaults to + project_dir / "logs"). + verbose : bool + Flag indicating wether the user has selected a DEBUG + verbosity level for logs. + + These inputs will be provided by GAPs and *will not* be + displayed to users in the template configuration files or + documentation. See + :func:`gaps.cli.preprocessing.preprocess_collect_config` + for an example. 
Note that the ``tag`` parameter is not + allowed as a pre-processing function signature item (the + node jobs will not have been configured before this function + executes). This function can also "request" new user inputs + that are not present in the signature of the main run + function. In this case, the documentation for these new + arguments is pulled from the ``config_preprocessor`` + function. This feature can be used to request auxillary + information from the user to fill in "private" inputs to the + main run function. See the implementation of + :func:`gaps.cli.preprocessing.preprocess_collect_config` and + :func:`gaps.cli.collect.collect` for an example of this + pattern. Do not request parameters with the same names as + any of your model function (i.e. if ``res_file`` is a model + parameter, do not request it in the preprocessing function + docstring - extract it from the config dictionary instead). + By default, ``None``. + skip_doc_params : iterable of str, optional + Optional iterable of parameter names that should be excluded + from the documentation/template configuration files. This + can be useful if your pre-processing function automatically + sets some parameters based on other user input. This option + is an alternative to the "private" arguments discussed in + the ``function`` parameter documentation above. By default, + ``None``. + """ + super().__init__( + name or method.strip("_").replace("_", "-"), + add_collect, + split_keys, + config_preprocessor, + skip_doc_params, + ) + self.runner = init + self.run_method = method + self._validate_run_method_exists() + + def _validate_run_method_exists(self): + """Validate that the ``run_method`` is implemented.""" + return getattr(self.runner, self.run_method) + + @cached_property + def documentation(self): + """CommandDocumentation: Documentation object.""" + return CommandDocumentation( + self.runner, + getattr(self.runner, self.run_method), + self.config_preprocessor, + skip_params=GAPS_SUPPLIED_ARGS | self.skip_doc_params, + is_split_spatially=self.is_split_spatially, + )
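For illustration, here is a minimal sketch of configuring a class-based command (the ``Model`` class and all of its parameters below are hypothetical placeholders, not part of ``gaps``)::

    from gaps.cli.command import CLICommandFromClass

    class Model:
        """Hypothetical model that is initialized once per node."""

        def __init__(self, project_points, turbine_height=100):
            self.project_points = project_points
            self.turbine_height = turbine_height

        def run(self, tag, out_dir):
            """Run the model and return the path to the output file."""
            out_fpath = f"{out_dir}/model_output{tag}.h5"
            # ... compute results for ``self.project_points`` and write them ...
            return out_fpath

    command = CLICommandFromClass(
        Model,
        "run",
        name="run-model",
        add_collect=True,
        split_keys={"project_points"},
    )

In this sketch, ``gaps`` documents both the ``__init__`` and ``run`` parameters, initializes ``Model`` on each node with its share of the project points, and then calls ``run``.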
+ + + +def _passthrough(config): + """Pass the input config through with no modifications.""" + return config + + +def _split_points(config_preprocessor): + """Add the `split_project_points_into_ranges` to preprocessing.""" + + @wraps(config_preprocessor) + def _config_preprocessor(config, *args, **kwargs): + config = config_preprocessor(config, *args, **kwargs) + config = split_project_points_into_ranges(config) + return config + + return _config_preprocessor + + +# pylint: disable=invalid-name,unused-argument +class _WrappedCommand(click.Command): + """Click Command class with an updated `get_help` function. + + References + ---------- + https://stackoverflow.com/questions/55585564/python-click-formatting-help-text + """ + + _WRAP_TEXT_REPLACED = False + + def get_help(self, ctx): + """Format the help into a string and return it.""" + if self._WRAP_TEXT_REPLACED: + return super().get_help(ctx) + + orig_wrap_test = click.formatting.wrap_text + + def wrap_text( + text, + width=78, + initial_indent="", + subsequent_indent="", + preserve_paragraphs=False, + ): + """Wrap text with gaps-style newline handling.""" + wrapped_text = orig_wrap_test( + text.replace("\n", "\n\n"), + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + preserve_paragraphs=True, + ) + wrapped_text = ( + wrapped_text.replace("\n\n", "\n").replace("::\n", ":\n\n") + # .replace("}\nParameters", "}\n\nParameters") + .replace("[required]", "\n[required]") + ) + if "Parameters\n----------" not in wrapped_text.replace(" ", ""): + wrapped_text = wrapped_text.replace(".\n", ".\n\n") + elif not _is_sphinx_build(): # pragma: no cover + wrapped_text = wrapped_text.replace( + "Parameters\n----------", + "\nConfig Parameters\n-----------------", + ) + + return wrapped_text + + click.formatting.wrap_text = wrap_text + self._WRAP_TEXT_REPLACED = True + return super().get_help(ctx) +
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/cli/config.html b/_modules/gaps/cli/config.html new file mode 100644 index 00000000..b3a5e2d1 --- /dev/null +++ b/_modules/gaps/cli/config.html @@ -0,0 +1,970 @@ + + + + + + + + + + gaps.cli.config — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Source code for gaps.cli.config

+# -*- coding: utf-8 -*-
+"""
+GAPs CLI command for spatially distributed function runs.
+"""
+import json
+import logging
+from copy import deepcopy
+from warnings import warn
+from pathlib import Path
+from itertools import product
+from inspect import signature, isclass
+
+import click
+
+from rex.utilities.loggers import init_mult  # cspell:disable-line
+
+from gaps import ProjectPoints
+from gaps.status import StatusUpdates, QOSOption, HardwareOption
+from gaps.config import load_config
+from gaps.log import init_logger
+from gaps.cli.execution import kickoff_job
+from gaps.cli.documentation import EXTRA_EXEC_PARAMS
+from gaps.exceptions import gapsKeyError
+from gaps.warnings import gapsWarning
+
+logger = logging.getLogger(__name__)
+
+_CMD_LIST = [
+    "from gaps.cli.config import run_with_status_updates",
+    "from {run_func_module} import {run_func_name}",
+    'su_args = "{project_dir}", "{command}", "{job_name}"',
+    "run_with_status_updates("
+    "   {run_func_name}, {node_specific_config}, {logging_options}, su_args, "
+    "   {exclude_from_status}"
+    ")",
+]
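+# Illustration (editor's note, not part of the gaps source): for a
+# hypothetical command "run" defined in "my_package.model" and a project
+# directory "/projects/demo", the command string built from _CMD_LIST and
+# submitted to a node looks roughly like:
+#
+#     python -c 'from gaps.cli.config import run_with_status_updates; '
+#               'from my_package.model import run_model; '
+#               'su_args = "/projects/demo", "run", "demo_run_j0"; '
+#               'run_with_status_updates('
+#               '   run_model, {...node config...}, {...logging options...}, '
+#               '   su_args, ["project_points"])'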
+TAG = "_j"
+MAX_AU_BEFORE_WARNING = 10_000
+GAPS_SUPPLIED_ARGS = {
+    "tag",
+    "command_name",
+    "config_file",
+    "project_dir",
+    "job_name",
+    "out_dir",
+    "out_fpath",
+    "config",
+    "log_directory",
+    "verbose",
+}
+
+
+class _FromConfig:
+    """Utility class for running a function from a config file."""
+
+    def __init__(self, ctx, config_file, command_config):
+        """
+
+        Parameters
+        ----------
+        ctx : click.Context
+            Click context for the invoked command.
+        config_file : path-like
+            Path to input file containing key-value pairs as input to
+            function.
+        command_config : `gaps.cli.command.CLICommandFromFunction`
+            A command configuration object containing info such as the
+            command name, run function, pre-processing function,
+            function documentation, etc.
+        """
+        self.ctx = ctx
+        self.config_file = Path(config_file).expanduser().resolve()
+        self.command_config = command_config
+        self.config = load_config(config_file)
+        self.log_directory = None
+        self.verbose = None
+        self.exec_kwargs = None
+        self.logging_options = None
+        self.exclude_from_status = None
+
+    @property
+    def project_dir(self):
+        """`Path`: Path to project directory."""
+        return self.config_file.parent
+
+    @property
+    def command_name(self):
+        """str: Name of command being run."""
+        return self.command_config.name
+
+    @property
+    def job_name(self):
+        """str: Name of job being run."""
+        return "_".join(
+            [self.project_dir.name, self.command_name.replace("-", "_")]
+        )
+
+    def enable_logging(self):
+        """Enable logging based on config file input."""
+        self.log_directory = self.config.pop(
+            "log_directory", (self.project_dir / "logs").as_posix()
+        )
+        self.log_directory = Path(self.log_directory)
+        self.verbose = self.config.pop(
+            "log_level", "INFO"
+        ) == "DEBUG" or self.ctx.obj.get("VERBOSE", False)
+        pipe_log_file = self.log_directory / f"{self.job_name}.log"
+        init_logger(
+            stream=self.ctx.obj.get("LOG_STREAM", True),
+            level="DEBUG" if self.verbose else "INFO",
+            file=pipe_log_file.as_posix(),
+        )
+        return self
+
+    def validate_config(self):
+        """Validate the user input config file."""
+        logger.debug("Validating %r", str(self.config_file))
+        _validate_config(self.config, self.command_config.documentation)
+        return self
+
+    def preprocess_config(self):
+        """Apply preprocessing function to config file."""
+
+        preprocessor_kwargs = {
+            "config": self.config,
+            "command_name": self.command_name,
+            "config_file": self.config_file,
+            "project_dir": self.project_dir,
+            "job_name": self.job_name,
+            "out_dir": self.project_dir,
+            "out_fpath": self.project_dir / self.job_name,
+            "log_directory": self.log_directory,
+            "verbose": self.verbose,
+        }
+        extra_preprocessor_kwargs = {
+            k: self.config[k]
+            for k in self.command_config.preprocessor_args
+            if k not in preprocessor_kwargs and k in self.config
+        }
+        preprocessor_kwargs.update(extra_preprocessor_kwargs)
+        preprocessor_defaults = {
+            k: v
+            for k, v in self.command_config.preprocessor_defaults.items()
+            if k not in preprocessor_kwargs
+        }
+        preprocessor_kwargs.update(preprocessor_defaults)
+        preprocessor_kwargs = {
+            k: v
+            for k, v in preprocessor_kwargs.items()
+            if k in self.command_config.preprocessor_args
+        }
+        self.config = self.command_config.config_preprocessor(
+            **preprocessor_kwargs
+        )
+        return self
+
+    def set_exec_kwargs(self):
+        """Extract the execution control dictionary."""
+        self.exec_kwargs = {
+            "option": "local",
+            "sh_script": "",
+            "stdout_path": (self.log_directory / "stdout").as_posix(),
+        }
+
+        self.exec_kwargs.update(self.config.get("execution_control", {}))
+        extra_params = set()
+        for extra_exec_param in EXTRA_EXEC_PARAMS:
+            if extra_exec_param in self.config:
+                extra_params.add(extra_exec_param)
+                param = self.config.pop(extra_exec_param)
+                self.exec_kwargs[extra_exec_param] = param
+
+        if extra_params:
+            msg = (
+                f"Found key(s) {extra_params} outside of 'execution_control'. "
+                "Moving these keys into 'execution_control' block. To "
+                "silence this warning, please specify these keys inside of "
+                "the 'execution_control' block."
+            )
+            warn(msg, gapsWarning)
+
+        return self
+
+    def set_logging_options(self):
+        """Assemble the logging options dictionary."""
+        self.logging_options = {
+            "name": self.job_name,
+            "log_directory": self.log_directory.as_posix(),
+            "verbose": self.verbose,
+            "node": self.exec_kwargs.get("option", "local") != "local",
+        }
+        return self
+
+    def set_exclude_from_status(self):
+        """Assemble the exclusion keyword set."""
+        self.exclude_from_status = {"project_points"}
+        self.exclude_from_status |= set(
+            self.config.pop("exclude_from_status", set())
+        )
+        self.exclude_from_status = [
+            key.lower()
+            for key in self.exclude_from_status
+            if key.lower() != "tag"
+        ]
+        return self
+
+    def prepare_context(self):
+        """Add required key-value pairs to context object."""
+        self.ctx.obj["COMMAND_NAME"] = self.command_name
+        self.ctx.obj["OUT_DIR"] = self.project_dir
+        return self
+
+    def log_job_info(self):
+        """Log information about job submission"""
+        logger.info(
+            "Running %s from config file: %r",
+            self.command_name,
+            str(self.config_file),
+        )
+        logger.info("Target output directory: %r", str(self.project_dir))
+        logger.info("Target logging directory: %r", str(self.log_directory))
+        return self
+
+    def kickoff_jobs(self):
+        """Kickoff jobs across nodes based on config and run function."""
+        keys_to_run, lists_to_run = self._keys_and_lists_to_run()
+
+        jobs = sorted(product(*lists_to_run))
+        num_jobs_submit = len(jobs)
+        self._warn_about_excessive_au_usage(num_jobs_submit)
+        n_zfill = len(str(max(0, num_jobs_submit - 1)))
+        extra_exec_args = {}
+        for param in EXTRA_EXEC_PARAMS:
+            if param in self.exec_kwargs:
+                extra_exec_args[param] = self.exec_kwargs.pop(param)
+        for node_index, values in enumerate(jobs):
+            if num_jobs_submit > 1:
+                tag = f"{TAG}{str(node_index).zfill(n_zfill)}"
+            else:
+                tag = ""
+            self.ctx.obj["NAME"] = job_name = f"{self.job_name}{tag}"
+            node_specific_config = deepcopy(self.config)
+            node_specific_config.pop("execution_control", None)
+            node_specific_config.update(
+                {
+                    "tag": tag,
+                    "command_name": self.command_name,
+                    "config_file": self.config_file.as_posix(),
+                    "project_dir": self.project_dir.as_posix(),
+                    "job_name": job_name,
+                    "out_dir": self.project_dir.as_posix(),
+                    "out_fpath": (self.project_dir / job_name).as_posix(),
+                    "run_method": getattr(
+                        self.command_config, "run_method", None
+                    ),
+                }
+            )
+            node_specific_config.update(extra_exec_args)
+
+            for key, val in zip(keys_to_run, values):
+                if isinstance(key, str):
+                    node_specific_config.update({key: val})
+                else:
+                    node_specific_config.update(dict(zip(key, val)))
+
+            cmd = "; ".join(_CMD_LIST).format(
+                run_func_module=self.command_config.runner.__module__,
+                run_func_name=self.command_config.runner.__name__,
+                node_specific_config=as_script_str(node_specific_config),
+                project_dir=self.project_dir.as_posix(),
+                logging_options=as_script_str(self.logging_options),
+                exclude_from_status=as_script_str(self.exclude_from_status),
+                command=self.command_name,
+                job_name=job_name,
+            )
+            cmd = f"python -c {cmd!r}"
+            kickoff_job(self.ctx, cmd, deepcopy(self.exec_kwargs))
+
+        return self
+
+    def _keys_and_lists_to_run(self):
+        """Compile run lists based on `command_config.split_keys` input."""
+        keys_to_run = []
+        lists_to_run = []
+        keys = sorted(self.command_config.split_keys, key=_project_points_last)
+        for key_group in keys:
+            keys_to_run.append(key_group)
+            if isinstance(key_group, str):
+                lists_to_run.append(self.config.get(key_group) or [None])
+            else:
+                lists_to_run.append(
+                    list(
+                        zip(*[self.config.get(k) or [None] for k in key_group])
+                    )
+                )
+        return keys_to_run, lists_to_run
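+
+    # Illustration (editor's note, not part of gaps): for
+    # ``split_keys={"c", ("a", "b")}`` and a user config with
+    # ``a=[1, 2]``, ``b=[3, 4]`` and ``c=[10, 20]``, this method returns
+    #
+    #     keys_to_run  = [("a", "b"), "c"]
+    #     lists_to_run = [[(1, 3), (2, 4)], [10, 20]]
+    #
+    # ``kickoff_jobs`` then takes the product of ``lists_to_run`` and
+    # submits four node jobs: (a=1, b=3, c=10), (a=1, b=3, c=20),
+    # (a=2, b=4, c=10) and (a=2, b=4, c=20).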
+
+    def _warn_about_excessive_au_usage(self, num_jobs):
+        """Warn if max job runtime exceeds AU threshold"""
+        try:
+            max_walltime_per_job = float(self.exec_kwargs.get("walltime"))
+        except (TypeError, ValueError):
+            return
+
+        qos = self.exec_kwargs.get("qos") or str(QOSOption.UNSPECIFIED)
+        try:
+            qos_charge_factor = QOSOption(str(qos)).charge_factor
+        except ValueError:
+            qos_charge_factor = 1
+
+        hardware = self.exec_kwargs.get("option", "local")
+        try:
+            hardware_charge_factor = HardwareOption(hardware).charge_factor
+        except ValueError:
+            return
+
+        max_au_usage = int(
+            num_jobs
+            * float(max_walltime_per_job)
+            * qos_charge_factor
+            * hardware_charge_factor
+        )
+        if max_au_usage > MAX_AU_BEFORE_WARNING:
+            msg = f"Job may use up to {max_au_usage:,} AUs!"
+            warn(msg, gapsWarning)
+
+    def run(self):
+        """Run the entire config pipeline."""
+        return (
+            self.enable_logging()
+            .validate_config()
+            .log_job_info()
+            .preprocess_config()
+            .set_exec_kwargs()
+            .set_logging_options()
+            .set_exclude_from_status()
+            .prepare_context()
+            .kickoff_jobs()
+        )
+
+
+
+[docs] +@click.pass_context +def from_config(ctx, config_file, command_config): + """Run command from a config file.""" + _FromConfig(ctx, config_file, command_config).run()
+ + + +def _validate_config(config, documentation): + """Ensure required keys exist and warn user about extra keys in config.""" + _ensure_required_args_exist(config, documentation) + _warn_about_extra_args(config, documentation) + + +def _ensure_required_args_exist(config, documentation): + """Make sure that args required for func to run exist in config.""" + missing = { + name for name in documentation.required_args if name not in config + } + + missing = _mark_extra_exec_params_missing_if_needed( + documentation, config, missing + ) + + if any(missing): + msg = ( + f"The following required keys are missing from the configuration " + f"file: {missing}" + ) + raise gapsKeyError(msg) + + +def _mark_extra_exec_params_missing_if_needed(documentation, config, missing): + """Add extra exec params as missing if not found in `execution_control.""" + exec_control = config.get("execution_control", {}) + for param in EXTRA_EXEC_PARAMS: + if documentation.param_required(param): + if param not in config and param not in exec_control: + missing |= {param} + return missing + + +def _warn_about_extra_args(config, documentation): + """Warn user about extra unused keys in the config file.""" + extra = { + name + for name in config.keys() + if not _param_in_sig(name, documentation) + } + extra -= {"execution_control", "project_points_split_range"} + if any(extra): + msg = ( + "Found unused keys in the configuration file: %s. To silence " + "this warning, please remove these keys from the input " + "configuration file." + ) + warn(msg % extra, gapsWarning) + + +def _param_in_sig(param, documentation): + """Determine if ``name`` is an argument in any func signatures""" + return any( + param in _public_args(signature) + for signature in documentation.signatures + ) + + +def _public_args(func_signature): + """Gather set of all "public" function args.""" + return { + param + for param in func_signature.parameters.keys() + if not param.startswith("_") + } + + +def _project_points_last(key): + """Sorting function that always puts "project_points_split_range" last.""" + if isinstance(key, str): + if key.casefold() == "project_points_split_range": + return (chr(0x10FFFF),) # PEP 393 + return (key,) + return key + + +
+[docs] +def as_script_str(input_): + """Convert input to how it would appear in a python script. + + Essentially this means the input is dumped to json format with some + minor replacements (e.g. null -> None, etc.). Importantly, all + string inputs are wrapped in double quotes. + + Parameters + ---------- + input_ : obj + Any object that can be serialized with :func:`json.dumps`. + + Returns + ------- + str + Input object as it would appear in a python script. + + Examples + -------- + >>> as_script_str("a") + "a" + + >>> as_script_str({"a": None, "b": True, "c": False, "d": 3, + ... "e": [{"t": "hi"}]}) + {"a": None, "b": True, "c": False, "d": 3, "e": [{"t": "hi"}]} + """ + return ( + json.dumps(input_) + .replace("null", "None") + .replace("false", "False") + .replace("true", "True") + )
+ + + +
+[docs] +def run_with_status_updates( + run_func, config, logging_options, status_update_args, exclude +): + """Run a function and write status updated before/after execution. + + Parameters + ---------- + run_func : callable + A function to run. + config : dict + Dictionary of node-specific inputs to `run_func`. + logging_options : dict + Dictionary of logging options containing at least the following + key-value pairs: + + name : str + Job name; name of log file. + log_directory : path-like + Path to log file directory. + verbose : bool + Option to turn on debug logging. + node : bool + Flag for whether this is a node-level logger. If this is + a node logger, and the log level is info, the log_file + will be `None` (sent to stdout). + + status_update_args : iterable + An iterable containing the first three initializer arguments for + :class:`StatusUpdates`. + exclude : collection | None + A collection (list, set, dict, etc.) of keys that should be + excluded from the job status file that is written before/after + the function runs. + """ + + # initialize loggers for multiple modules + init_mult( # cspell:disable-line + logging_options["name"], + logging_options["log_directory"], + modules=[run_func.__module__.split(".")[0], "gaps", "rex"], + verbose=logging_options["verbose"], + node=logging_options["node"], + ) + + run_kwargs = node_kwargs(run_func, config) + exclude = exclude or set() + job_attrs = { + key: value for key, value in run_kwargs.items() if key not in exclude + } + status_update_args = *status_update_args, job_attrs + with StatusUpdates(*status_update_args) as status: + out = run_func(**run_kwargs) + if method := config.get("run_method"): + func = getattr(out, method) + run_kwargs = node_kwargs(func, config) + status.job_attrs.update( + { + key: value + for key, value in run_kwargs.items() + if key not in exclude + } + ) + out = func(**run_kwargs) + status.out_file = out
+ + + +
+
[docs]
+def node_kwargs(run_func, config):
+    """Compile the function input arguments for a particular node.
+
+    Parameters
+    ----------
+    run_func : callable
+        A function to run.
+    config : dict
+        Dictionary of node-specific inputs to `run_func`.
+
+    Returns
+    -------
+    dict
+        Function run kwargs to be used on this node.
+    """
+
+    split_range = config.pop("project_points_split_range", None)
+    if split_range is not None:
+        config["project_points"] = ProjectPoints.from_range(
+            split_range, config["project_points"]
+        )
+
+    sig = signature(run_func)
+    run_kwargs = {
+        k: v for k, v in config.items() if k in sig.parameters.keys()
+    }
+    verb = "Initializing" if isclass(run_func) else "Running"
+    logger.debug("%s %r with kwargs: %s", verb, run_func.__name__, run_kwargs)
+    return run_kwargs
+ +
+ + + + + + + + \ No newline at end of file diff --git a/_modules/gaps/cli/documentation.html b/_modules/gaps/cli/documentation.html new file mode 100644 index 00000000..9b816e30 --- /dev/null +++ b/_modules/gaps/cli/documentation.html @@ -0,0 +1,1125 @@ + + + + + + + + + + gaps.cli.documentation — gaps 0.4.2 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Source code for gaps.cli.documentation

+# -*- coding: utf-8 -*-
+"""
+CLI documentation utilities.
+"""
+from copy import deepcopy
+from itertools import chain
+from functools import lru_cache
+from inspect import signature, isclass
+
+from numpydoc.docscrape import NumpyDocString
+
+from gaps.config import config_as_str_for_docstring, ConfigType
+from gaps.utilities import _is_sphinx_build
+
+
+DEFAULT_EXEC_VALUES = {
+    "option": "local",
+    "allocation": "[REQUIRED IF ON HPC]",
+    "walltime": "[REQUIRED IF ON HPC]",
+    "qos": "normal",
+    "memory": None,
+    "nodes": 1,
+    "queue": None,
+    "feature": None,
+    "conda_env": None,
+    "module": None,
+    "sh_script": None,
+}
+
+EXTRA_EXEC_PARAMS = {
+    "max_workers": """Maximum number of parallel workers run on each node.""",
+    "sites_per_worker": """Number of sites to run in series on a worker.""",
+    "memory_utilization_limit": """Memory utilization limit (fractional).
+                Must be a value between 0 and 1. This input sets
+                how much data will be stored in-memory at any
+                given time before flushing to disk.
+    """,
+    "timeout": """Number of seconds to wait for parallel run iteration
+                to complete before early termination.
+    """,
+    "pool_size": """Number of futures to submit to a single process pool
+                for parallel futures.
+    """,
+}
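+# Illustration (editor's note, not generated by gaps itself): when a model
+# function requests one of the parameters above (e.g. ``max_workers``),
+# users provide it inside the "execution_control" block of their config
+# file rather than at the top level, for example:
+#
+#     "execution_control": {
+#         "option": "local",
+#         "max_workers": 4
+#     }
+#
+# Keys supplied at the top level are moved into "execution_control" with a
+# warning (see ``gaps.cli.config._FromConfig.set_exec_kwargs``).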
+
+CONFIG_TYPES = [
+    ConfigType.JSON,
+    ConfigType.YAML,
+    ConfigType.TOML,
+]
+
+MAIN_DOC = """{name} Command Line Interface.
+
+Typically, a good place to start is to set up a {name} job with a pipeline
+config that points to several {name} modules that you want to run in serial.
+
+To begin, you can generate some template configuration files using::
+
+    $ {name} template-configs
+
+By default, this generates template JSON configuration files, though you
+can request JSON5, YAML, or TOML configuration files instead. You can run
+``$ {name} template-configs --help`` on the command line to see all available
+options for the ``template-configs`` command. Once the template configuration
+files have been generated, you can fill them out by referring to the
+module CLI documentation (if available) or the help pages of the module CLIs
+for more details on the config options for each CLI command::
+
+    {cli_help_str}
+
+After appropriately filling out the configuration files for each module you
+want to run, you can call the {name} pipeline CLI using::
+
+    $ {name} pipeline -c config_pipeline.json
+
+This command will run each pipeline step in sequence.
+
+.. Note:: You will need to re-submit the ``pipeline`` command above after
+          each completed pipeline step.
+
+To check the status of the pipeline, you can run::
+
+    $ {name} status
+
+This will print a report to the command line detailing the progress of the
+current pipeline. See ``$ {name} status --help`` for all status command
+options.
+
+If you need to parameterize the pipeline execution, you can use the ``batch``
+command. For details on setting up a batch config file, see the documentation
+or run::
+
+    $ {name} batch --help
+
+on the command line. Once you set up a batch config file, you can execute
+it using::
+
+    $ {name} batch -c config_batch.json
+
+The general structure of the {name} CLI is given below.
+"""
+
+PIPELINE_CONFIG_DOC = """
+Path to the ``pipeline`` configuration file. This argument can be
+left out, but *one and only one file* with "pipeline" in the
+name should exist in the directory and contain the config
+information. {sample_config}
+
+Parameters
+----------
+pipeline : list of dicts
+    A list of dictionaries, where each dictionary represents one
+    step in the pipeline. Each dictionary should have one
+    key-value pair, where the key is the name of the CLI
+    command to run, and the value is the path to a config
+    file containing the configuration for that command.
+logging : dict, optional
+    Dictionary containing keyword-argument pairs to pass to
+    `init_logger <https://tinyurl.com/47hakp7f/>`_. This
+    initializes logging for the submission portion of the
+    pipeline. Note, however, that each step (command) will
+    **also** record the submission step log output to a
+    common "project" log file, so it's only ever necessary
+    to use this input if you want a different (lower) level
+    of verbosity than the `log_level` specified in the
+    config for the step of the pipeline being executed.
+
+"""
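+
+# Illustrative sketch (hypothetical step names and paths): a pipeline config
+# with the structure documented above, as it would look after being loaded
+# from e.g. "config_pipeline.json".
+_EXAMPLE_PIPELINE_CONFIG = {
+    "pipeline": [
+        {"run": "./config_run.json"},
+        {"collect-run": "./config_collect_run.json"},
+    ],
+    "logging": {"log_file": None, "log_level": "INFO"},
+}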
+
+BATCH_CONFIG_DOC = """
+Path to the ``batch`` configuration file. {sample_config}
+
+Parameters
+----------
+pipeline_config : str
+    Path to the pipeline configuration defining the commands to
+    run for every parametric set.
+sets : list of dicts
+    A list of dictionaries, where each dictionary defines a
+    "set" of parametric runs. Each dictionary should have
+    the following keys:
+
+        args : dict
+            A dictionary defining the arguments across all input
+            configuration files to parameterize. Each argument
+            to be parameterized should be a key in this
+            dictionary, and the value should be a list of the
+            parameter values to run for this argument.
+
+            {batch_args_dict}
+
+            Remember that the keys in the ``args`` dictionary
+            should be part of (at least) one of your other
+            configuration files.
+        files : list
+            A list of paths to the configuration files that
+            contain the arguments to be updated for every
+            parametric run. Arguments can be spread out over
+            multiple files. {batch_files}
+        set_tag : str, optional
+            Optional string defining a set tag that will prefix
+            each job tag for this set. This tag does not need to
+            include an underscore, as that is provided during
+            concatenation.
+
+
+"""
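+
+# Illustrative sketch (hypothetical paths and values): a batch config with the
+# structure documented above. The single set below parameterizes
+# "input_constant_1" across two values in "config_run.json".
+_EXAMPLE_BATCH_CONFIG = {
+    "pipeline_config": "./config_pipeline.json",
+    "sets": [
+        {
+            "args": {"input_constant_1": [18.02, 19.04]},
+            "files": ["./config_run.json"],
+            "set_tag": "set1",
+        },
+    ],
+}
+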
+_BATCH_ARGS_DICT = """.. tabs::
+
+                .. group-tab:: JSON/JSON5
+                    ::
+
+                        {sample_json_args_dict}
+
+                .. group-tab:: YAML
+                    ::
+
+                        {sample_yaml_args_dict}
+
+                .. group-tab:: TOML
+                    ::
+
+                        {sample_toml_args_dict}
+
+
+            This example would run a total of six pipelines, one
+            with each of the following arg combinations:
+            ::
+
+                input_constant_1=18.02, path_to_a_file="/first/path.h5"
+                input_constant_1=18.02, path_to_a_file="/second/path.h5"
+                input_constant_1=18.02, path_to_a_file="/third/path.h5"
+                input_constant_1=19.04, path_to_a_file="/first/path.h5"
+                input_constant_1=19.04, path_to_a_file="/second/path.h5"
+                input_constant_1=19.04, path_to_a_file="/third/path.h5"
+
+
+"""
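+
+# Illustrative sketch of the combinatorics described above: every key in
+# "args" contributes one axis to a Cartesian product, so two values crossed
+# with three file paths yield the six pipelines listed in the docstring.
+_EXAMPLE_COMBOS = [
+    {"input_constant_1": const, "path_to_a_file": path}
+    for const in [18.02, 19.04]
+    for path in ["/first/path.h5", "/second/path.h5", "/third/path.h5"]
+]
+assert len(_EXAMPLE_COMBOS) == 6  # 2 values x 3 paths
+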
+_BATCH_FILES = """For example:
+
+            .. tabs::
+
+                .. group-tab:: JSON/JSON5
+                    ::
+
+                        {sample_json_files}
+
+                .. group-tab:: YAML
+                    ::
+
+                        {sample_yaml_files}
+
+                .. group-tab:: TOML
+                    ::
+
+                        {sample_toml_files}
+
+
+"""
+
+CONFIG_DOC = """
+Path to the ``{name}`` configuration file. {sample_config}
+
+{docstring}
+
+Note that you may remove any keys with a ``null`` value if you do not intend to update them yourself.
+"""
+SAMPLE_CONFIG_DOC = """Below is a sample template config
+
+.. tabs::
+
+    .. group-tab:: JSON/JSON5
+        ::
+
+            {template_json_config}
+
+    .. group-tab:: YAML
+        ::
+
+            {template_yaml_config}
+
+    .. group-tab:: TOML
+        ::
+
+            {template_toml_config}
+
+"""
+COMMAND_DOC = """
+Execute the ``{name}`` step from a config file.
+
+{desc}
+
+The general structure for calling this CLI command is given below (add
+``--help`` to print help info to the terminal).
+
+"""
+EXEC_CONTROL_DOC = """
+Parameters
+----------
+    execution_control : dict
+        Dictionary containing execution control arguments. Allowed
+        arguments are:
+
+        :option: ({{'local', 'kestrel', 'eagle', 'peregrine'}})
+            Hardware run option. Determines the type of job
+            scheduler to use as well as the base AU cost.
+        :allocation: (str)
+            HPC project (allocation) handle.
+        :walltime: (int)
+            Node walltime request in hours.
+        :qos: (str, optional)
+            Quality-of-service specifier. On Eagle or
+            Kestrel, this should be one of {{'standby', 'normal',
+            'high'}}. Note that 'high' priority doubles the AU
+            cost. By default, ``"normal"``.
+        :memory: (int, optional)
+            Node memory request in GB. By default, ``None``, which
+            does not specify a memory limit.{n}{eep}
+        :queue: (str, optional; PBS ONLY)
+            HPC queue to submit job to. Examples include: 'debug',
+            'short', 'batch', 'batch-h', 'long', etc.
+            By default, ``None``, which uses "test_queue".
+        :feature: (str, optional)
+            Additional flags for SLURM job (e.g. "-p debug").
+            By default, ``None``, which does not specify any
+            additional flags.
+        :conda_env: (str, optional)
+            Name of conda environment to activate. By default,
+            ``None``, which does not load any environments.
+        :module: (str, optional)
+            Module to load. By default, ``None``, which does not
+            load any modules.
+        :sh_script: (str, optional)
+            Extra shell script to run before command call.
+            By default, ``None``, which does not run any
+            scripts.
+
+        Only the `option` key is required for local execution. For
+        execution on the HPC, the `allocation` and `walltime` keys are also
+        required. All other options are populated with default values,
+        as seen above.
+    log_directory : str
+        Path to directory where logs should be written. Path can be relative
+        and does not have to exist on disk (it will be created if missing).
+        By default, ``"./logs"``.
+    log_level : {{"DEBUG", "INFO", "WARNING", "ERROR"}}
+        String representation of desired logger verbosity. Suitable options
+        are ``DEBUG`` (most verbose), ``INFO`` (moderately verbose),
+        ``WARNING`` (only log warnings and errors), and ``ERROR`` (only log
+        errors). By default, ``"INFO"``.
+
+"""
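+
+# Illustrative sketch (hypothetical allocation name and values): a filled-out
+# "execution_control" block for an HPC run. Only "option", "allocation", and
+# "walltime" are required; "max_workers" is one of the EXTRA_EXEC_PARAMS that
+# is only available for commands that accept it.
+_EXAMPLE_EXECUTION_CONTROL = {
+    "option": "kestrel",
+    "allocation": "my-hpc-allocation",
+    "walltime": 4,
+    "qos": "normal",
+    "memory": 178,
+    "max_workers": 36,
+}
+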
+NODES_DOC = (
+    "\n        :nodes: (int, optional)"
+    "\n            Number of nodes to split the project points across. "
+    "\n            Note that the total number of requested nodes for "
+    "\n            a job may be larger than this value if the command"
+    "\n            splits across other inputs. Default is ``1``."
+)
+EXTRA_EXEC_PARAM_DOC = "\n        :{name}: ({type})\n            {desc}"
+
+
+
+[docs] +class CommandDocumentation: + """Generate documentation for a command. + + Commands are typically comprised of one or more functions. This + definition includes class initializers and object methods. + Documentation is compiled from all input functions and used to + generate CLI help docs and template configuration files. + """ + + REQUIRED_TAG = "[REQUIRED]" + + def __init__(self, *functions, skip_params=None, is_split_spatially=False): + """ + Parameters + ---------- + *functions : callables + Functions that comprise a single command for which to + generate documentation. **IMPORTANT** The extended summary + will be pulled form the first function only! + skip_params : set, optional + Set of parameter names (str) to exclude from documentation. + Typically this is because the user would not explicitly have + to specify these. By default, `None`. + is_split_spatially : bool, optional + Flag indicating wether or not this function is split + spatially across nodes. If `True`, a "nodes" option is added + to the execution control block of the generated + documentation. By default, `False`. + """ + self.signatures = [ + signature(func) for func in _as_functions(functions) + ] + self.docs = [ + NumpyDocString(func.__doc__ or "") + for func in _as_functions(functions) + ] + self.param_docs = { + p.name: p for doc in self.docs for p in doc["Parameters"] + } + self.skip_params = set() if skip_params is None else set(skip_params) + self.skip_params |= {"cls", "self"} | set(EXTRA_EXEC_PARAMS) + self.is_split_spatially = is_split_spatially + + @property + def default_exec_values(self): + """dict: Default "execution_control" config.""" + exec_vals = deepcopy(DEFAULT_EXEC_VALUES) + if not self.is_split_spatially: + exec_vals.pop("nodes", None) + for param in EXTRA_EXEC_PARAMS: + if self._param_in_func_signature(param): + exec_vals[param] = ( + self.REQUIRED_TAG + if self.param_required(param) + else self._param_value(param).default + ) + return exec_vals + + @property + def exec_control_doc(self): + """str: Execution_control documentation.""" + nodes_doc = NODES_DOC if self.is_split_spatially else "" + return EXEC_CONTROL_DOC.format( + n=nodes_doc, eep=self._extra_exec_param_doc + ) + + @property + def _extra_exec_param_doc(self): + """str: Docstring formatted with the info from the input func.""" + return "".join( + [ + self._format_extra_exec_param_doc(param_name) + for param_name in EXTRA_EXEC_PARAMS + ] + ) + + def _format_extra_exec_param_doc(self, param_name): + """Format extra exec control parameters""" + param = self.param_docs.get(param_name) + try: + return EXTRA_EXEC_PARAM_DOC.format( + name=param_name, type=param.type, desc=" ".join(param.desc) + ) + except AttributeError: + pass + + if self._param_in_func_signature(param_name): + if self.param_required(param_name): + param_type = "int" + default_text = "" + else: + default_val = self._param_value(param_name).default + if default_val is None: + param_type = "int, optional" + else: + param_type = f"{type(default_val).__name__}, optional" + default_text = f"By default, ``{default_val}``." 
+ return EXTRA_EXEC_PARAM_DOC.format( + name=param_name, + type=param_type, + desc=" ".join([EXTRA_EXEC_PARAMS[param_name], default_text]), + ) + return "" + + @property + def required_args(self): + """set: Required parameters of the input function.""" + required_args = { + name + for sig in self.signatures + for name, param in sig.parameters.items() + if not name.startswith("_") + and param.default is param.empty + and name not in self.skip_params + } + return required_args + + @property + def template_config(self): + """dict: A template configuration file for this function.""" + config = { + "execution_control": self.default_exec_values, + "log_directory": "./logs", + "log_level": "INFO", + } + config.update( + { + x: self.REQUIRED_TAG if v.default is v.empty else v.default + for sig in self.signatures + for x, v in sig.parameters.items() + if not x.startswith("_") and x not in self.skip_params + } + ) + return config + + @property + def parameter_help(self): + """str: Parameter help for the func, including execution control.""" + exec_dict_param = [ + p + for p in NumpyDocString(self.exec_control_doc)["Parameters"] + if p.name in {"execution_control", "log_directory", "log_level"} + ] + param_only_doc = NumpyDocString("") + param_only_doc["Parameters"] = exec_dict_param + [ + p + for doc in self.docs + for p in doc["Parameters"] + if p.name in self.template_config + ] + return "\n".join(_format_lines(str(param_only_doc).split("\n"))) + + @property + def extended_summary(self): + """str: Function extended summary, with extra whitespace stripped.""" + return "\n".join( + [x.lstrip().rstrip() for x in self.docs[0]["Extended Summary"]] + ) + +
+[docs] + def config_help(self, command_name): + """Generate a config help string for a command. + + Parameters + ---------- + command_name : str + Name of command for which the config help is being + generated. + + Returns + ------- + str + Help string for the config file. + """ + if _is_sphinx_build(): + sample_config = SAMPLE_CONFIG_DOC.format( + template_json_config=config_as_str_for_docstring( + self.template_config, config_type=ConfigType.JSON + ), + template_yaml_config=config_as_str_for_docstring( + self.template_config, config_type=ConfigType.YAML + ), + template_toml_config=config_as_str_for_docstring( + self.template_config, config_type=ConfigType.TOML + ), + ) + else: + sample_config = "" + + doc = CONFIG_DOC.format( + name=command_name, + sample_config=sample_config, + docstring=self.parameter_help, + ) + return _cli_formatted(doc)
+ + +
+[docs] + def command_help(self, command_name): + """Generate a help string for a command. + + Parameters + ---------- + command_name : str + Name of command for which the help string is being + generated. + + Returns + ------- + str + Help string for the command. + """ + doc = COMMAND_DOC.format(name=command_name, desc=self.extended_summary) + return _cli_formatted(doc)
+ + + @lru_cache(maxsize=16) + def _param_value(self, param): + """Extract parameter if it exists in signature""" + for sig in self.signatures: + for name in sig.parameters: + if name == param: + return sig.parameters[param] + return None + + @lru_cache(maxsize=16) + def _param_in_func_signature(self, param): + """`True` if `param` is a param of the input function.""" + return self._param_value(param) is not None + +
+[docs]
+    @lru_cache(maxsize=16)
+    def param_required(self, param):
+        """Check whether a parameter is a required input for the run function.
+
+        Parameters
+        ----------
+        param : str
+            Name of parameter to check.
+
+        Returns
+        -------
+        bool
+            ``True`` if `param` is a required parameter of `func`.
+        """
+        param = self._param_value(param)
+        if param is None:
+            return False
+        return param.default is param.empty
+
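+
+# Illustrative usage sketch (the function below is hypothetical, not part of
+# gaps): build documentation for a spatially-split run function.
+def _example_run(project_points, max_workers=None):
+    """Run an example model.
+
+    Parameters
+    ----------
+    project_points : str
+        Path to project points file.
+    max_workers : int, optional
+        Max number of parallel workers. By default, ``None``.
+    """
+
+
+_EXAMPLE_DOC = CommandDocumentation(_example_run, is_split_spatially=True)
+# ``_EXAMPLE_DOC.template_config`` now holds an "execution_control" block with
+# a "nodes" key (because ``is_split_spatially=True``) and a "max_workers" key,
+# plus "project_points" tagged as [REQUIRED].
+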
+ + + +def _main_command_help(prog_name, commands): + """Generate main command help from commands input.""" + cli_help_str = "\n\n ".join( + [f"$ {prog_name} --help"] + + [f"$ {prog_name} {command.name} --help" for command in commands] + ) + return MAIN_DOC.format(name=prog_name, cli_help_str=cli_help_str) + + +def _pipeline_command_help(pipeline_config): # pragma: no cover + """Generate pipeline command help from a sample config.""" + if not _is_sphinx_build(): + return _cli_formatted(PIPELINE_CONFIG_DOC.format(sample_config="")) + + template_names = [ + "template_json_config", + "template_yaml_config", + "template_toml_config", + ] + sample_config = SAMPLE_CONFIG_DOC.format( + **_format_dict(pipeline_config, template_names) + ) + doc = PIPELINE_CONFIG_DOC.format(sample_config=sample_config) + return _cli_formatted(doc) + + +def _batch_command_help(): # pragma: no cover + """Generate batch command help from a sample config.""" + if not _is_sphinx_build(): + doc = BATCH_CONFIG_DOC.format( + sample_config="", batch_args_dict="", batch_files="" + ) + return _cli_formatted(doc) + + format_inputs = {} + template_config = { + "pipeline_config": CommandDocumentation.REQUIRED_TAG, + "sets": [ + { + "args": CommandDocumentation.REQUIRED_TAG, + "files": CommandDocumentation.REQUIRED_TAG, + "set_tag": "set1", + }, + { + "args": CommandDocumentation.REQUIRED_TAG, + "files": CommandDocumentation.REQUIRED_TAG, + "set_tag": "set2", + }, + ], + } + template_names = [ + "template_json_config", + "template_yaml_config", + "template_toml_config", + ] + + format_inputs["sample_config"] = SAMPLE_CONFIG_DOC.format( + **_format_dict(template_config, template_names) + ) + + sample_args_dict = { + "args": { + "input_constant_1": [18.02, 19.04], + "path_to_a_file": [ + "/first/path.h5", + "/second/path.h5", + "/third/path.h5", + ], + } + } + sample_args_names = [ + "sample_json_args_dict", + "sample_yaml_args_dict", + "sample_toml_args_dict", + ] + format_inputs["batch_args_dict"] = _BATCH_ARGS_DICT.format( + **_format_dict(sample_args_dict, sample_args_names, batch_docs=True) + ) + + sample_files = {"files": ["./config_run.yaml", "./config_analyze.json"]} + sample_files_names = [ + "sample_json_files", + "sample_yaml_files", + "sample_toml_files", + ] + format_inputs["batch_files"] = _BATCH_FILES.format( + **_format_dict(sample_files, sample_files_names, batch_docs=True) + ) + + doc = BATCH_CONFIG_DOC.format(**format_inputs) + return _cli_formatted(doc) + + +def _format_dict(sample, names, batch_docs=False): # pragma: no cover + """Format a sample into a documentation config""" + configs = {} + for name, c_type in zip(names, CONFIG_TYPES): + configs[name] = config_as_str_for_docstring( + sample, + config_type=c_type, + num_spaces=(20 if "json" in name else 24) if batch_docs else 12, + ) + if batch_docs and "json" in name: + configs[name] = "\n".join(configs[name].split("\n")[1:-1]).lstrip() + return configs + + +def _as_functions(functions): + """Yield items from input, converting all classes to their init methods""" + for func in functions: + if isclass(func): + func = func.__init__ + yield func + + +def _line_needs_newline(line): + """Determine wether a newline should be added to the current line""" + if any( + f":{key}:" in line + for key in chain(DEFAULT_EXEC_VALUES, EXTRA_EXEC_PARAMS) + ): + return True + if not line.startswith(" "): + return True + if len(line) <= 4: + return True + if line[4] == " ": + return True + return False + + +def _format_lines(lines): + """Format docs into longer lines of text for 
easier wrapping in CLI""" + new_lines = [lines[0]] + current_line = [] + for line in lines[1:]: + if _line_needs_newline(line): + current_line = " ".join(current_line) + if current_line: + line = "\n".join([current_line, line]) + new_lines.append(line) + current_line = [] + else: + if current_line: + line = line.lstrip() + current_line.append(line) + + return new_lines + + +def _cli_formatted(doc): + """Apply minor formatting changes for strings when displaying to CLI""" + if not _is_sphinx_build(): + doc = doc.replace("``", "`").replace("{{", "{").replace("}}", "}") + return doc +
\ No newline at end of file
diff --git a/_modules/gaps/cli/execution.html b/_modules/gaps/cli/execution.html
new file mode 100644
index 00000000..12842a01
--- /dev/null
+++ b/_modules/gaps/cli/execution.html
@@ -0,0 +1,592 @@
+gaps.cli.execution — gaps 0.4.2 documentation

Source code for gaps.cli.execution

+# -*- coding: utf-8 -*-
+"""
+GAPs execution CLI utils.
+"""
+import logging
+import datetime as dt
+from warnings import warn
+from inspect import signature
+
+from gaps.hpc import submit
+from gaps.status import (
+    DT_FMT,
+    Status,
+    HardwareOption,
+    StatusOption,
+    StatusField,
+    QOSOption,
+)
+from gaps.warnings import gapsWarning
+from gaps.exceptions import gapsConfigError
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +def kickoff_job(ctx, cmd, exec_kwargs): + """Kickoff a single job (a single command execution). + + Parameters + ---------- + ctx : click.Context + Context object with a `.obj` attribute that contains at least + the following keys: + + NAME : str + Job name. + OUT_DIR : path-like + Path to output directory. + COMMAND_NAME : str + Name of command being run. + + cmd : str + String form of command to kickoff. + exec_kwargs : dict + Keyword-value pairs to pass to the respective `submit` function. + These will be filtered, so they may contain extra values. If + some required inputs are missing from this dictionary, a + `gapsConfigError` is raised. + + Raises + ------ + gapsConfigError + If `exec_kwargs` is missing some arguments required by the + respective `submit` function. + """ + hardware_option = HardwareOption(exec_kwargs.pop("option", "local")) + if hardware_option.manager is None: + _kickoff_local_job(ctx, cmd) + return + + ctx.obj["MANAGER"] = hardware_option.manager + exec_kwargs = _filter_exec_kwargs( + exec_kwargs, hardware_option.manager.make_script_str, hardware_option + ) + _kickoff_hpc_job(ctx, cmd, hardware_option, **exec_kwargs)
+ + + +def _filter_exec_kwargs(kwargs, func, hardware_option): + """Filter out extra keywords and raise error if any are missing.""" + sig = signature(func) + kwargs_to_use = { + k: v for k, v in kwargs.items() if k in sig.parameters.keys() + } + extra_keys = set(kwargs) - set(kwargs_to_use) + if extra_keys: + msg = ( + f"Found extra keys in 'execution_control'! The following " + f"inputs will be ignored: {extra_keys}. To silence this warning, " + "please remove the extra keys from the 'execution_control' block." + ) + warn(msg, gapsWarning) + + required = { + name for name, p in sig.parameters.items() if p.default == p.empty + } + required -= {"self", "cmd", "name"} + missing = {k for k in required if k not in kwargs_to_use} + if missing: + msg = ( + f"The 'execution_control' block is missing the following " + f"required keys: {missing}" + ) + raise gapsConfigError(msg) + + if hardware_option.supports_categorical_qos: + qos = kwargs_to_use.get("qos", "normal") + try: + qos = QOSOption(qos) + except ValueError as err: + msg = ( + f"Requested Quality-of-service option ({qos!r}) not " + f"recognized! Available options are: " + f"{QOSOption.members_as_str()}." + ) + raise gapsConfigError(msg) from err + + kwargs_to_use["qos"] = f"{qos}" + + return kwargs_to_use + + +def _kickoff_local_job(ctx, cmd): + """Run a job (command) locally.""" + + if not _should_run(ctx): + return + + name = ctx.obj["NAME"] + command = ctx.obj["COMMAND_NAME"] + logger.info("Running %r locally with job name %r.", command, name) + logger.debug("Submitting the following command:\n%s", cmd) + Status.mark_job_as_submitted( + ctx.obj["OUT_DIR"], + command=ctx.obj["COMMAND_NAME"], + job_name=name, + replace=True, + job_attrs={ + StatusField.JOB_STATUS: StatusOption.SUBMITTED, + StatusField.HARDWARE: HardwareOption.LOCAL, + StatusField.TIME_SUBMITTED: dt.datetime.now().strftime(DT_FMT), + }, + ) + stdout, stderr = submit(cmd) + if stdout: + logger.info("Subprocess received stdout: \n%s", stdout) + if stderr: + logger.warning("Subprocess received stderr: \n%s", stderr) + msg = f"Completed job {name!r}." + logger.info(msg) + + +def _kickoff_hpc_job(ctx, cmd, hardware_option, **kwargs): + """Run a job (command) on the HPC.""" + + if not _should_run(ctx): + return + + name = ctx.obj["NAME"] + command = ctx.obj["COMMAND_NAME"] + logger.info("Running %r on HPC with job name %r.", command, name) + logger.debug("Submitting the following command:\n%s", cmd) + out = ctx.obj["MANAGER"].submit(name, cmd=cmd, **kwargs)[0] + id_msg = f" (Job ID #{out})" if out else "" + msg = f"Kicked off job {name!r}{id_msg}" + + Status.mark_job_as_submitted( + ctx.obj["OUT_DIR"], + command=ctx.obj["COMMAND_NAME"], + job_name=name, + replace=True, + job_attrs={ + StatusField.JOB_ID: out, + StatusField.HARDWARE: hardware_option, + StatusField.QOS: kwargs.get("qos") or QOSOption.UNSPECIFIED, + StatusField.JOB_STATUS: StatusOption.SUBMITTED, + StatusField.TIME_SUBMITTED: dt.datetime.now().strftime(DT_FMT), + }, + ) + logger.info(msg) + + +def _should_run(ctx): + """Determine wether a command should be run based on status.""" + name = ctx.obj["NAME"] + out_dir = ctx.obj["OUT_DIR"] + status = Status.retrieve_job_status( + out_dir, + command=ctx.obj["COMMAND_NAME"], + job_name=name, + subprocess_manager=ctx.obj.get("MANAGER"), + ) + if status == StatusOption.NOT_SUBMITTED: + return True + + if status in {StatusOption.SUCCESSFUL, StatusOption.COMPLETE}: + msg = ( + f"Job {name!r} is successful in status json found in {out_dir!r}, " + f"not re-running." 
+ ) + logger.info(msg) + return False + + if status is not None and "fail" not in str(status).lower(): + msg = ( + f"Job {name!r} was found with status {status!r}, not resubmitting" + ) + logger.info(msg) + return False + + return True +
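+
+# Illustrative usage sketch (assumed click context contents; not part of this
+# module):
+#
+#     ctx.obj = {"NAME": "run_job0", "OUT_DIR": "./", "COMMAND_NAME": "run"}
+#     kickoff_job(ctx, cmd="python -m my_model", exec_kwargs={"option": "local"})
+#
+# With ``option: "local"`` the command is submitted directly via
+# ``gaps.hpc.submit``; any HPC option is routed through the corresponding
+# hardware manager after the extra keys are filtered and validated.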
\ No newline at end of file
diff --git a/_modules/gaps/cli/pipeline.html b/_modules/gaps/cli/pipeline.html
new file mode 100644
index 00000000..340f5924
--- /dev/null
+++ b/_modules/gaps/cli/pipeline.html
@@ -0,0 +1,561 @@
+gaps.cli.pipeline — gaps 0.4.2 documentation

Source code for gaps.cli.pipeline

+# -*- coding: utf-8 -*-
+"""
+GAPs pipeline CLI entry points.
+"""
+import os
+import sys
+import logging
+from pathlib import Path
+from warnings import warn
+
+import click
+import psutil
+
+from gaps import Pipeline
+from gaps.config import ConfigType, init_logging_from_config_file
+from gaps.cli.documentation import _pipeline_command_help
+from gaps.cli.command import _WrappedCommand
+from gaps.status import Status, StatusField
+from gaps.exceptions import gapsExecutionError
+from gaps.warnings import gapsWarning
+
+
+logger = logging.getLogger(__name__)
+
+
+def _can_run_background():
+    """Determine if GAPs can execute the pipeline in the background."""
+    return hasattr(os, "setsid") and hasattr(os, "fork")
+
+
+
+[docs] +def template_pipeline_config(commands): + """Generate a template pipeline config based on the commands.""" + _pipeline = [] + for command in commands: + command_name_config = command.name.replace("-", "_") + sample_config_filename = ( + f"./config_{command_name_config}.{ConfigType.JSON}" + ) + _pipeline.append({command.name: sample_config_filename}) + return { + "pipeline": _pipeline, + "logging": {"log_file": None, "log_level": "INFO"}, + }
+ + + +
+[docs] +@click.pass_context +def pipeline(ctx, config_file, cancel, monitor, background=False): + """Execute multiple steps in an analysis pipeline.""" + + if config_file is None: + config_file = [ + fp + for fp in Path(".").glob("*") + if fp.is_file() and "pipeline" in fp.name + ] + if len(config_file) != 1: + msg = ( + f"Could not determine config file - multiple (or no) files " + f" detected with 'pipeline' in the name exist: {config_file}" + ) + raise gapsExecutionError(msg) + + config_file = config_file[0] + + init_logging_from_config_file(config_file, background=background) + + if cancel: + Pipeline.cancel_all(config_file) + return + + if background: + if not _can_run_background(): + msg = ( + "Cannot run pipeline in background on system that does not " + "implement os.fork and os.setsid" + ) + raise gapsExecutionError(msg) + ctx.obj["LOG_STREAM"] = False + _kickoff_background(config_file) + + project_dir = str(Path(config_file).parent.expanduser().resolve()) + status = Status(project_dir).update_from_all_job_files(purge=False) + monitor_pid = status.get(StatusField.MONITOR_PID) + if monitor_pid is not None and psutil.pid_exists(monitor_pid): + msg = ( + f"Another pipeline in {project_dir!r} is running on monitor PID: " + f"{monitor_pid}. Not starting a new pipeline execution." + ) + warn(msg, gapsWarning) + return + + if monitor: + Status.record_monitor_pid(Path(config_file).parent, os.getpid()) + + Pipeline.run(config_file, monitor=monitor)
+ + + +# pylint: disable=no-member +def _kickoff_background(config_file): # pragma: no cover + """Kickoff a child process that runs pipeline in the background.""" + pid = os.fork() + if pid == 0: + os.setsid() # This creates a new session + Pipeline.run(config_file, monitor=True) + else: + Status.record_monitor_pid(Path(config_file).parent, pid) + click.echo( + f"Kicking off pipeline job in the background. Monitor PID: {pid}" + ) + sys.exit() + + +
+[docs] +def pipeline_command(template_config): + """Generate a pipeline command.""" + params = [ + click.Option( + param_decls=["--config_file", "-c"], + default=None, + help=_pipeline_command_help(template_config), + ), + click.Option( + param_decls=["--cancel"], + is_flag=True, + help="Flag to cancel all jobs associated with a given pipeline.", + ), + click.Option( + param_decls=["--monitor"], + is_flag=True, + help="Flag to monitor pipeline jobs continuously. Default is not " + "to monitor (kick off jobs and exit).", + ), + ] + if _can_run_background(): + params += [ + click.Option( + param_decls=["--background"], + is_flag=True, + help="Flag to monitor pipeline jobs continuously in the " + "background. Note that the stdout/stderr will not be " + "captured, but you can set a pipeline 'log_file' to " + "capture logs.", + ), + ] + + return _WrappedCommand( + "pipeline", + context_settings=None, + callback=pipeline, + params=params, + help=( + "Execute multiple steps in an analysis pipeline.\n\n" + "The general structure for calling this CLI command is given " + "below (add``--help`` to print help info to the terminal)." + ), + epilog=None, + short_help=None, + options_metavar="[OPTIONS]", + add_help_option=True, + no_args_is_help=False, + hidden=False, + deprecated=False, + )
\ No newline at end of file
diff --git a/_modules/gaps/cli/preprocessing.html b/_modules/gaps/cli/preprocessing.html
new file mode 100644
index 00000000..d72793d5
--- /dev/null
+++ b/_modules/gaps/cli/preprocessing.html
@@ -0,0 +1,545 @@
+gaps.cli.preprocessing — gaps 0.4.2 documentation

Source code for gaps.cli.preprocessing

+# -*- coding: utf-8 -*-
+"""
+GAPs config preprocessing functions.
+"""
+import re
+import glob
+from math import ceil
+from warnings import warn
+from collections import abc
+
+from gaps.project_points import ProjectPoints
+from gaps.cli.config import TAG
+from gaps.pipeline import parse_previous_status
+from gaps.utilities import resolve_path
+from gaps.exceptions import gapsConfigError
+
+from gaps.warnings import gapsWarning
+
+
+
+[docs] +def split_project_points_into_ranges(config): + """Split project points into ranges inside of config. + + Parameters + ---------- + config : dict + Run config. This config must have a "project_points" input that + can be used to initialize :class:`ProjectPoints`. + + Returns + ------- + dict + Run config with a "project_points_split_range" key that split + the project points into multiple ranges based on node input. + """ + project_points_file = config["project_points"] + exec_control = config.get("execution_control", {}) + if exec_control.get("option") == "local": + num_nodes = 1 + else: + num_nodes = exec_control.pop("nodes", 1) + + points = ProjectPoints(project_points_file) + sites_per_split = ceil(len(points) / num_nodes) + config["project_points_split_range"] = [ + sub_points.split_range + for sub_points in points.split(sites_per_split=sites_per_split) + ] + return config
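+
+# Worked example of the split above (hypothetical sizes): with 1,000 project
+# points and ``"nodes": 4`` under "execution_control", sites_per_split is
+# ceil(1000 / 4) = 250, so the points are split into four contiguous ranges of
+# 250 sites each and stored under "project_points_split_range".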
+ + + +
+[docs] +def preprocess_collect_config( + config, project_dir, command_name, collect_pattern="PIPELINE" +): + """Pre-process collection config. + + Specifically, the "collect_pattern" key is resolved into a list of + 2-tuples, where the first element in each tuple is a path to the + collection output file, while the second element is the + corresponding filepath-pattern representing the files to be + collected into the output file. + + Parameters + ---------- + config : dict + Collection config. This config will be updated to include a + "collect_pattern" key if it doesn't already have one. If the + "collect_pattern" key exists, it can be a string, a collection, + or a mapping with a `.items()` method. + project_dir : path-like + Path to project directory. This path is used to resolve the + out filepath input from the user. + command_name : str + Name of the command being run. This is used to parse the + pipeline status for output files if + ``collect_pattern="PIPELINE"`` in the input `config`. + collect_pattern : str | list | dict, optional + Unix-style ``/filepath/pattern*.h5`` representing the files to + be collected into a single output HDF5 file. If no output file + path is specified (i.e. this input is a single pattern or a list + of patterns), the output file path will be inferred from the + pattern itself (specifically, the wildcard will be removed + and the result will be the output file path). If a list of + patterns is provided, each pattern will be collected into a + separate output file. To specify the name of the output file(s), + set this input to a dictionary where the keys are paths to the + output file (including the filename itself; relative paths are + allowed) and the values are patterns representing the files that + should be collected into the output file. If running a collect + job as part of a pipeline, this input can be set to + ``"PIPELINE"``, which will parse the output of the previous step + and generate the input file pattern and output file name + automatically. By default, ``"PIPELINE"``. + + Returns + ------- + dict + Updated collection config. + """ + files = collect_pattern + if files == "PIPELINE": + files = parse_previous_status(project_dir, command_name) + files = [re.sub(f"{TAG}\\d+", "*", fname) for fname in files] + + if isinstance(files, str): + files = [files] + + if isinstance(files, abc.Sequence): + files = {pattern.replace("*", ""): pattern for pattern in files} + + files = [ + ( + resolve_path( + out_path if out_path.startswith("/") else f"./{out_path}", + project_dir, + ), + pattern, + ) + for out_path, pattern in files.items() + ] + config["_out_path"], config["_pattern"] = zip(*_validate_patterns(files)) + return config
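+
+# Worked example of the pattern resolution above (hypothetical paths): a
+# collect_pattern of "./outputs/run*.h5" has its wildcard stripped to infer
+# the output file name, yielding the ("<project_dir>/outputs/run.h5",
+# "./outputs/run*.h5") pair stored under "_out_path" / "_pattern".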
+ + + +def _validate_patterns(files): + """Remove any patterns that have no corresponding files.""" + + patterns_with_no_files = [] + files_to_collect = [] + for out, pattern in files: + if glob.glob(pattern): + files_to_collect.append((out, pattern)) + else: + patterns_with_no_files.append(pattern) + + if any(patterns_with_no_files): + msg = ( + f"Could not find any files for the following patterns: " + f"{patterns_with_no_files}. Skipping..." + ) + warn(msg, gapsWarning) + + if not files_to_collect: + msg = ( + "Found no files to collect! Please double check your config input " + "and verify that the files to be collected exist on disk!" + ) + raise gapsConfigError(msg) + + return files_to_collect +
\ No newline at end of file
diff --git a/_modules/gaps/cli/status.html b/_modules/gaps/cli/status.html
new file mode 100644
index 00000000..00ed0c34
--- /dev/null
+++ b/_modules/gaps/cli/status.html
@@ -0,0 +1,819 @@
+gaps.cli.status — gaps 0.4.2 documentation

Source code for gaps.cli.status

+# -*- coding: utf-8 -*-
+"""GAPs Status Monitor"""
+import datetime as dt
+from pathlib import Path
+from warnings import warn
+from itertools import chain
+
+import click
+import psutil
+import pandas as pd
+from colorama import init, Fore, Style
+from tabulate import tabulate, SEPARATING_LINE
+
+from gaps.status import (
+    DT_FMT,
+    Status,
+    StatusField,
+    StatusOption,
+    HardwareOption,
+    QOSOption,
+    _elapsed_time_as_str,
+)
+from gaps.warnings import gapsWarning
+from gaps.cli.command import _WrappedCommand
+
+
+JOB_STATUS_COL = StatusField.JOB_STATUS.value
+FAILURE_STRINGS = ["failure", "fail", "failed", "f"]
+RUNNING_STRINGS = ["running", "run", "r"]
+SUBMITTED_STRINGS = ["submitted", "submit", "sb", "pending", "pend", "p"]
+SUCCESS_STRINGS = ["successful", "success", "s"]
+# cspell:disable-next-line
+NOT_SUBMITTED_STRINGS = ["unsubmitted", "unsubmit", "u", "not_submitted", "ns"]
+FILTER_HELP = """
+Filter jobs for the requested status(es). Allowed options (case-insensitive)
+include:
+
+    - Failed: {fail_options}
+    - Running: {running_options}
+    - Submitted: {submitted_options}
+    - Success: {success_options}
+    - Not submitted: {ns_options}
+
+Multiple status keys can be specified by repeating this option
+(e.g. :code:`-s status1 -s status2 ...`). By default, all status values are
+displayed.
+"""
+STATUS_HELP = """
+Display the status of a project FOLDER.
+
+By default, the status of the current working directory is displayed.
+
+The general structure for calling this CLI command is given below
+(add ``--help`` to print help info to the terminal).
+"""
+
+
+def _extract_qos_charge_factor(option, enum_class):
+    """Get the charge factor of a value in a row"""
+    try:
+        return enum_class(str(option)).charge_factor
+    except ValueError:
+        return 1
+
+
+def _filter_df_for_status(df, status_request):
+    """Check for a specific status."""
+
+    filter_statuses = set()
+    for request in status_request:
+        request = request.lower()
+        if request in FAILURE_STRINGS:
+            filter_statuses |= {StatusOption.FAILED}
+        elif request in SUCCESS_STRINGS:
+            filter_statuses |= {StatusOption.SUCCESSFUL}
+        elif request in RUNNING_STRINGS:
+            filter_statuses |= {StatusOption.RUNNING}
+        elif request in SUBMITTED_STRINGS:
+            filter_statuses |= {StatusOption.SUBMITTED}
+        elif request in NOT_SUBMITTED_STRINGS:
+            filter_statuses |= {StatusOption.NOT_SUBMITTED}
+        else:
+            msg = (
+                f"Requested status not recognized: {status_request!r}. "
+                "No additional filtering performed!"
+            )
+            warn(msg, gapsWarning)
+
+    df = df[df[StatusField.JOB_STATUS].isin(filter_statuses)]
+    return df.copy()
+
+
+def _calculate_runtime_stats(df):
+    """Calculate df runtime statistics"""
+
+    run_times_seconds = df[StatusField.RUNTIME_SECONDS]
+    if run_times_seconds.isna().any():
+        return pd.DataFrame()
+    runtime_stats = pd.DataFrame(run_times_seconds.astype(float).describe())
+    runtime_stats = runtime_stats.T[["min", "mean", "50%", "max"]]
+    runtime_stats = runtime_stats.rename(columns={"50%": "median"})
+    # runtime_stats = runtime_stats.rename(
+    #     columns={
+    #         col: f"{Fore.MAGENTA}{col}{Style.RESET_ALL}"
+    #         for col in runtime_stats.columns
+    #     }
+    # )
+    runtime_stats.index = ["Node runtime"]
+    runtime_stats = runtime_stats.T
+    runtime_stats["Node runtime"] = runtime_stats["Node runtime"].apply(
+        _elapsed_time_as_str
+    )
+    return pd.DataFrame(runtime_stats).reset_index()
+
+
+def _calculate_walltime(df):
+    """Calculate total project walltime"""
+    all_jobs_failed = (df[StatusField.JOB_STATUS] == StatusOption.FAILED).all()
+    all_end_times_missing = df[StatusField.TIME_END].isna().all()
+    if all_jobs_failed and all_end_times_missing:
+        return 0
+
+    start_time = df[StatusField.TIME_SUBMITTED].fillna(
+        dt.datetime.now().strftime(DT_FMT)
+    )
+    start_time = pd.to_datetime(start_time, format=DT_FMT).min()
+
+    end_time = df[StatusField.TIME_END].fillna(
+        dt.datetime.now().strftime(DT_FMT)
+    )
+    end_time = pd.to_datetime(end_time, format=DT_FMT).max()
+    return (end_time - start_time).total_seconds()
+
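+# Worked example (hypothetical timestamps): if the earliest submission time
+# across the shown jobs is 10:00:00 and the latest end time is 11:30:00, the
+# reported project walltime is 5,400 seconds, regardless of idle time between
+# pipeline steps.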
+
+def _calculate_aus(df):
+    """Calculate the number of AUs spent by jobs"""
+    run_times_seconds = df[StatusField.RUNTIME_SECONDS]
+
+    hardware_charge_factors = df[StatusField.HARDWARE].apply(
+        _extract_qos_charge_factor, enum_class=HardwareOption
+    )
+    qos_charge_factors = df[StatusField.QOS].apply(
+        _extract_qos_charge_factor, enum_class=QOSOption
+    )
+    aus_used = (
+        run_times_seconds / 3600 * hardware_charge_factors * qos_charge_factors
+    )
+    return int(aus_used.sum())
+
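+# Worked example of the AU formula above (hypothetical charge factors): a job
+# with a runtime of 7,200 seconds on hardware with a charge factor of 3 under
+# a QOS option with a charge factor of 2 costs 7200 / 3600 * 3 * 2 = 12 AUs.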
+
+def _color_string(string):
+    """Color string value based on status option"""
+    if string == StatusOption.FAILED:
+        string = f"{Fore.RED}{string}{Style.RESET_ALL}"
+    elif string == StatusOption.SUCCESSFUL:
+        string = f"{Fore.GREEN}{string}{Style.RESET_ALL}"
+    elif string == StatusOption.RUNNING:
+        string = f"{Fore.BLUE}{string}{Style.RESET_ALL}"
+    else:
+        string = f"{Fore.YELLOW}{string}{Style.RESET_ALL}"
+    return string
+
+
+def _print_intro(print_folder, commands, status, monitor_pid):
+    """Print intro including project folder and command/status filters."""
+    extras = []
+    commands = ", ".join(commands or [])
+    if commands:
+        extras.append(f"commands={commands}")
+    status = ", ".join(status or [])
+    if status:
+        extras.append(f"status={status}")
+    if extras:
+        extras = "; ".join(extras)
+        extras = f" ({extras})"
+    else:
+        extras = ""
+
+    print(f"\n{Fore.CYAN}{print_folder}{extras}{Style.RESET_ALL}:")
+    if monitor_pid is not None and psutil.pid_exists(monitor_pid):
+        print(f"{Fore.MAGENTA}MONITOR PID: {monitor_pid}{Style.RESET_ALL}")
+
+
+def _print_df(df):
+    """Print main status body (table of job statuses)"""
+    df[JOB_STATUS_COL] = df[JOB_STATUS_COL].apply(_color_string)
+    df = df.fillna("--")
+
+    pdf = pd.concat([df, pd.DataFrame({JOB_STATUS_COL: [SEPARATING_LINE]})])
+    pdf = tabulate(
+        pdf,
+        showindex=True,  # cspell:disable-line
+        headers=df.columns,
+        tablefmt="simple",  # cspell:disable-line
+        disable_numparse=[3],  # cspell:disable-line
+    )
+    pdf = pdf.split("\n")
+    pdf[-1] = pdf[-1].replace(" ", "-")
+    pdf = "\n".join(pdf)
+    print(pdf)
+
+
+def _print_job_status_statistics(df):
+    """Print job status statistics."""
+    statistics_str = f"Total number of jobs: {df.shape[0]}"
+    counts = pd.DataFrame(df[JOB_STATUS_COL].value_counts()).reset_index()
+    counts = tabulate(
+        # pylint: disable=unsubscriptable-object
+        counts[["count", JOB_STATUS_COL]].values,
+        showindex=False,  # cspell:disable-line
+        headers=counts.columns,
+        tablefmt="simple",  # cspell:disable-line
+        disable_numparse=[1],  # cspell:disable-line
+    )
+    counts = "\n".join(
+        [f"  {substr.lstrip()}" for substr in counts.split("\n")[2:]]
+    )
+    print(f"{Style.BRIGHT}{statistics_str}{Style.RESET_ALL}")
+    print(counts)
+
+
+def _print_runtime_stats(runtime_stats, total_runtime_seconds):
+    """Print node runtime statistics"""
+
+    runtime_str = (
+        f"Total node runtime: "
+        f"{_elapsed_time_as_str(total_runtime_seconds)}"
+    )
+    print(f"{Style.BRIGHT}{runtime_str}{Style.RESET_ALL}")
+    if runtime_stats.empty:
+        return
+
+    runtime_stats = tabulate(
+        runtime_stats,
+        showindex=False,  # cspell:disable-line
+        headers=runtime_stats.columns,
+        tablefmt="simple",  # cspell:disable-line
+    )
+    runtime_stats = "\n".join(
+        [f"  {substr}" for substr in runtime_stats.split("\n")[2:]]
+    )
+    print(runtime_stats)
+
+
+def _print_au_usage(total_aus_used):
+    """Print the job AU usage."""
+    if total_aus_used <= 0:
+        return
+
+    au_str = f"Total AUs spent: {total_aus_used:,}"
+    print(f"{Style.BRIGHT}{au_str}{Style.RESET_ALL}")
+
+
+def _print_total_walltime(walltime):
+    """Print the total project walltime."""
+    if walltime <= 2:
+        return
+    walltime_str = (
+        f"Total project wall time (including queue and downtime "
+        f"between steps): {_elapsed_time_as_str(walltime)}"
+    )
+    print(f"{Style.BRIGHT}{walltime_str}{Style.RESET_ALL}")
+
+
+def _print_disclaimer():
+    """Print disclaimer about statistics."""
+    print(
+        f"{Style.BRIGHT}**Statistics only include shown jobs (excluding "
+        f"any previous runs or other steps)**{Style.RESET_ALL}"
+    )
+
+
+def _color_print(
+    df,
+    print_folder,
+    commands,
+    status,
+    walltime,
+    runtime_stats,
+    total_aus_used,
+    monitor_pid=None,
+    total_runtime_seconds=None,
+):
+    """Color the status portion of the print out."""
+
+    _print_intro(print_folder, commands, status, monitor_pid)
+    _print_df(df)
+    _print_job_status_statistics(df)
+
+    if total_runtime_seconds is None:
+        return
+
+    _print_runtime_stats(runtime_stats, total_runtime_seconds)
+    _print_au_usage(total_aus_used)
+    _print_total_walltime(walltime)
+    _print_disclaimer()
+
+
+
+[docs] +def main_monitor(folder, commands, status, include, recursive): + """Run the appropriate monitor functions for a folder. + + Parameters + ---------- + folder : path-like + Path to folder for which to print status. + commands : container of str + Container with the commands to display. + status : container of str + Container with the statuses to display. + include : container of str + Container of extra keys to pull from the status files. + recursive : bool + Option to perform recursive search of directories. + """ + + init() + folders = [Path(folder).expanduser().resolve()] + if recursive: + folders = chain(folders, folders[0].rglob("*")) + + for directory in folders: + if not directory.is_dir(): + continue + + pipe_status = Status(directory) + if not pipe_status: + print(f"No non-empty status file found in {str(directory)!r}. ") + continue + + include_with_runtime = list(include) + [StatusField.RUNTIME_SECONDS] + df = pipe_status.as_df( + commands=commands, include_cols=include_with_runtime + ) + if status: + df = _filter_df_for_status(df, status) + + if df.empty: + print( + f"No status data found to display for {str(directory)!r}. " + "Please check your filters and try again." + ) + continue + + runtime_stats = _calculate_runtime_stats(df) + aus_used = _calculate_aus(df) + walltime = _calculate_walltime(df) + + _color_print( + df[list(df.columns)[:-1]].copy(), + directory.name, + commands, + status, + walltime, + runtime_stats, + total_aus_used=aus_used, + monitor_pid=pipe_status.get(StatusField.MONITOR_PID), + total_runtime_seconds=df[StatusField.RUNTIME_SECONDS].sum(), + )
+ + + +
+[docs] +def status_command(): + """A status CLI command.""" + filter_help = FILTER_HELP.format( + fail_options=" ".join([f"``{s}``" for s in FAILURE_STRINGS]), + running_options=" ".join([f"``{s}``" for s in RUNNING_STRINGS]), + submitted_options=" ".join([f"``{s}``" for s in SUBMITTED_STRINGS]), + success_options=" ".join([f"``{s}``" for s in SUCCESS_STRINGS]), + ns_options=" ".join([f"``{s}``" for s in NOT_SUBMITTED_STRINGS]), + ) + + params = [ + click.Argument( + param_decls=["folder"], + default=Path.cwd(), + type=click.Path(exists=True), + ), + click.Option( + param_decls=["--commands", "-c"], + multiple=True, + default=None, + help="Filter status for the given command(s). Multiple commands " + "can be specified by repeating this option (e.g. :code:`-c " + "command1 -c command2 ...`) By default, the status of all " + "commands is displayed.", + ), + click.Option( + param_decls=["--status", "-s"], + multiple=True, + default=None, + help=filter_help, + ), + click.Option( + param_decls=["--include", "-i"], + multiple=True, + default=None, + help="Extra status keys to include in the print output for each " + "job. Multiple status keys can be specified by repeating " + "this option (e.g. :code:`-i key1 -i key2 ...`) By default, no " + "extra keys are displayed.", + ), + click.Option( + param_decls=["--recursive", "-r"], + is_flag=True, + help="Option to perform a recursive search of directories " + "(starting with the input directory). The status of every nested " + "directory is reported.", + ), + ] + + return _WrappedCommand( + "status", + context_settings=None, + callback=main_monitor, + params=params, + help=STATUS_HELP, + epilog=None, + short_help=None, + options_metavar="[OPTIONS]", + add_help_option=True, + no_args_is_help=False, + hidden=False, + deprecated=False, + )
\ No newline at end of file
diff --git a/_modules/gaps/cli/templates.html b/_modules/gaps/cli/templates.html
new file mode 100644
index 00000000..da746484
--- /dev/null
+++ b/_modules/gaps/cli/templates.html
@@ -0,0 +1,508 @@
+gaps.cli.templates — gaps 0.4.2 documentation

Source code for gaps.cli.templates

+# -*- coding: utf-8 -*-
+"""
+GAPs CLI template config generation command.
+"""
+import logging
+from functools import partial
+from warnings import warn
+
+import click
+
+from rex.utilities.loggers import init_logger
+from gaps.config import ConfigType
+from gaps.warnings import gapsWarning
+from gaps.cli.command import _WrappedCommand
+
+
+logger = logging.getLogger(__name__)
+
+
+# pylint: disable=redefined-builtin
+@click.pass_context
+def _make_template_config(ctx, commands, type, configs):
+    """Filter configs and write to file based on type."""
+    if ctx.obj.get("VERBOSE"):
+        init_logger("gaps")
+
+    configs_to_write = _filter_configs(commands, configs)
+    _write_configs(configs_to_write, ConfigType(type))
+
+
+def _filter_configs(commands, configs):
+    """Filter down to only the configs to be written."""
+    commands = commands or configs
+    configs_to_write = {}
+    missing_commands = []
+    for command in commands:
+        if command not in configs:
+            missing_commands.append(command)
+            continue
+        configs_to_write[command] = configs[command]
+
+    if missing_commands:
+        msg = (
+            f"The following commands were not found: {missing_commands}. "
+            "Skipping..."
+        )
+        warn(msg, gapsWarning)
+
+    return configs_to_write
+
+
+def _write_configs(configs_to_write, config_type):
+    """Write out template configs."""
+    for command_name, config in configs_to_write.items():
+        sample_config_name = f"config_{command_name}.{config_type}"
+        sample_config_name = sample_config_name.replace("-", "_")
+        logger.info(
+            "Generating template config file for command %r: %r",
+            command_name,
+            sample_config_name,
+        )
+        if command_name == "pipeline" and "pipeline" in config:
+            config["pipeline"] = [
+                _update_file_types(pair, config_type)
+                for pair in config["pipeline"]
+            ]
+        config_type.write(sample_config_name, config)
+
+
+def _update_file_types(pairs, new_type):
+    """Update the file endings for all items in the pairs dict."""
+    old, new = f".{ConfigType.JSON}", f".{new_type}"
+    return {command: path.replace(old, new) for command, path in pairs.items()}
+
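+# Worked example of the extension swap above (hypothetical step name): with
+# ``new_type=ConfigType.YAML``, {"run": "./config_run.json"} becomes
+# {"run": "./config_run.yaml"}.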
+
+
+[docs] +def template_command(template_configs): + """A template config generation CLI command.""" + allowed_types = " ".join(sorted(f"``{ct}``" for ct in ConfigType)) + params = [ + click.Argument( + param_decls=["commands"], + required=False, + nargs=-1, + ), + click.Option( + param_decls=["--type", "-t"], + default=f"{ConfigType.JSON}", + help=f"Configuration file type to generate. Allowed options " + f"(case-insensitive): {allowed_types}.", + show_default=True, + ), + ] + return _WrappedCommand( + "template-configs", + context_settings=None, + callback=partial( + _make_template_config, + configs=template_configs, + ), + params=params, + help=( + "Generate template config files for requested COMMANDS. If no " + "COMMANDS are given, config files for the entire pipeline are " + "generated.\n\nThe general structure for calling this CLI " + "command is given below (add``--help`` to print help info to " + "the terminal)." + ), + epilog=None, + short_help=None, + options_metavar="", + add_help_option=True, + no_args_is_help=False, + hidden=False, + deprecated=False, + )
\ No newline at end of file
diff --git a/_modules/gaps/collection.html b/_modules/gaps/collection.html
new file mode 100644
index 00000000..0e6fef0c
--- /dev/null
+++ b/_modules/gaps/collection.html
@@ -0,0 +1,1369 @@
+gaps.collection — gaps 0.4.2 documentation

Source code for gaps.collection

+# -*- coding: utf-8 -*-
+"""
+Base class to handle collection of datasets across multiple .h5 files
+"""
+import sys
+import time
+import shutil
+import logging
+from pathlib import Path
+from warnings import warn
+
+import numpy as np
+import psutil
+import pandas as pd
+
+from rex import Resource, Outputs
+from gaps.log import log_versions
+from gaps.warnings import gapsCollectionWarning
+from gaps.exceptions import gapsRuntimeError
+from gaps.utilities import project_points_from_container_or_slice
+
+logger = logging.getLogger(__name__)
+
+
+class _OutputsWithAliases(Outputs):
+    """Helper class that exposes and aliases some functions."""
+
+    def create_dataset(self, *args, **kwargs):
+        """Expose `_create_dset` call."""  # cspell:disable-line
+        return self._create_dset(*args, **kwargs)  # cspell:disable-line
+
+    def get_dataset_properties(self, *args, **kwargs):
+        """Alias for `get_dset_properties`."""  # cspell:disable-line
+        return self.get_dset_properties(*args, **kwargs)  # cspell:disable-line
+
+    def get_time_index(self, *args, **kwargs):
+        """Expose `_get_time_index` call."""
+        return self._get_time_index(*args, **kwargs)
+
+    def set_meta(self, *args, **kwargs):
+        """Expose `_set_meta` call."""
+        return self._set_meta(*args, **kwargs)
+
+    def set_time_index(self, *args, **kwargs):
+        """Expose `_set_time_index` call."""
+        return self._set_time_index(*args, **kwargs)
+
+
+
+[docs] +class DatasetCollector: + """Collector for a single dataset.""" + + def __init__( + self, + h5_file, + source_files, + gids, + dataset_in, + dataset_out=None, + memory_utilization_limit=0.7, + pass_through=False, + ): + """ + Parameters + ---------- + h5_file : path-like + Path to h5_file into which dataset is to be collected. + source_files : list + List of source filepaths. + gids : list + List of gids to be collected. + dataset_in : str + Name of dataset to collect. + dataset_out : str, optional + Name of dataset into which collected data is to be written. + If `None` the name of the output dataset is assumed to match + the dataset input name. By default, `None`. + memory_utilization_limit : float, optional + Memory utilization limit (fractional). This sets how many + sites will be collected at a time. By default, `0.7`. + pass_through : bool, optional + Flag to just pass through dataset from one of the source + files, assuming all of the source files have identical + copies of this dataset. By default, `False`. + """ + self._h5_file = h5_file + self._source_files = source_files + self._gids = gids + self._pass_through = pass_through + self._dataset_in = dataset_in + self._file_gid_map = { + fp: parse_meta(fp)["gid"].values.tolist() + for fp in self._source_files + } + if dataset_out is None: + dataset_out = dataset_in + self._dataset_out = dataset_out + + tot_mem = psutil.virtual_memory().total + self._mem_avail = memory_utilization_limit * tot_mem + self._axis, self._site_mem_req = self._pre_collect() + + logger.debug( + "Available memory for collection is %.2f bytes", self._mem_avail + ) + logger.debug( + "Site memory requirement is: %.2f bytes", self._site_mem_req + ) + + @property + def gids(self): + """list: List of gids corresponding to all sites to be combined.""" + return self._gids + + @property + def duplicate_gids(self): + """bool: `True` if there are duplicate gids being collected.""" + return len(self.gids) > len(set(self.gids)) + + def _pre_collect(self): + """Run a pre-collection check and get relevant dataset attrs. + + Returns + ------- + axis : int + Axis size (1 is 1D array, 2 is 2D array). + site_mem_req : float + Memory requirement in bytes to collect a single site from + one source file. + """ + with _OutputsWithAliases(self._source_files[0], mode="r") as out: + shape, dtype, chunks = out.get_dataset_properties(self._dataset_in) + attrs = out.get_attrs(self._dataset_in) + axis = len(out[self._dataset_in].shape) + + with _OutputsWithAliases(self._h5_file, mode="a") as out: + if axis == 1: + dataset_shape = (len(out),) + elif axis == 2: + if "time_index" in out.datasets: + dataset_shape = out.shape + else: + msg = ( + "'time_index' must be combined before profiles can " + "be combined." + ) + raise gapsRuntimeError(msg) + else: + msg = ( + f"Cannot collect dataset {self._dataset_in!r} with " + f"axis {axis}" + ) + raise gapsRuntimeError(msg) + + if self._dataset_out not in out.datasets: + out.create_dataset( + self._dataset_out, + dataset_shape, + dtype, + chunks=chunks, + attrs=attrs, + ) + + site_mem_req = _get_site_mem_req(shape, dtype) + + return axis, site_mem_req + + def _get_source_gid_chunks(self, f_source): + """Split gids from a source file into chunks based on memory req. + + Parameters + ---------- + f_source : :class:`rex.Outputs` + Source file handler. + + Returns + ------- + all_source_gids : list + List of all source gids to be collected. + source_gid_chunks : list + List of source gid chunks to collect. 
+ """ + + all_source_gids = f_source.get_meta_arr("gid") + mem_req = len(all_source_gids) * self._site_mem_req + + if mem_req > self._mem_avail: + num_chunks = 2 + while True: + source_gid_chunks = np.array_split(all_source_gids, num_chunks) + new_mem_req = len(source_gid_chunks[0]) * self._site_mem_req + if new_mem_req > self._mem_avail: + num_chunks += 1 + else: + logger.debug( + "Collecting dataset %r in %d chunks with " + "an estimated %.2f bytes in each chunk " + "(mem avail limit is %.2f bytes).", + self._dataset_in, + num_chunks, + new_mem_req, + self._mem_avail, + ) + break + else: + source_gid_chunks = [all_source_gids] + + return all_source_gids, source_gid_chunks + + def _collect_chunk( + self, all_source_gids, source_gids, f_out, f_source, fp_source + ): + """Collect one set of source gids from f_source to f_out. + + Parameters + ---------- + all_source_gids : list + List of all source gids to be collected. + source_gids : np.ndarray | list + Source gids to be collected. + f_out : rex.Outputs + Output file handler. + f_source : rex.Outputs + Source file handler. + fp_source : str + Source filepath. + """ + + out_slice, source_slice, source_indexer = self._get_chunk_indices( + all_source_gids, source_gids, fp_source + ) + + try: + if self._axis == 1: + data = f_source[self._dataset_in, source_slice] + if not all(source_indexer): + data = data[source_indexer] + f_out[self._dataset_out, out_slice] = data + + elif self._axis == 2: + data = f_source[self._dataset_in, :, source_slice] + if not all(source_indexer): + data = data[:, source_indexer] + f_out[self._dataset_out, :, out_slice] = data + + except Exception as exc: + msg = ( + f"Failed to collect {self._dataset_in!r} from source file " + f"{fp_source.name!r}." + ) + raise gapsRuntimeError(msg) from exc + + def _get_chunk_indices(self, all_source_gids, source_gids, fp_source): + """Slices and indices used for selecting source gids. + + Parameters + ---------- + all_source_gids : list + List of all source gids to be collected. + source_gids : np.ndarray | list + Source gids to be collected. This is the same as + `all_source_gids` if collection is not being done in chunks. + f_out : :class:`rex.Outputs` + Output file handler. + f_source : :class:`rex.Outputs` + Source file handler. + fp_source : str + Source filepath. + + Returns + ------- + out_slice : slice | ndarray + Slice specifying location of source data in output file. + This can also be a boolean array if source gids are not + sequential in the output file. + source_slice : slice + Slice specifying index range of source data in input file. + If collection is not being done in chunks this is just + slice(None). + source_indexer : ndarray + boolean array specifying which source gids (not just a + range) should be stored in output. 
+ """ + source_indexer = np.isin(source_gids, self._gids) + out_slice = _get_gid_slice( + self._gids, source_gids, Path(fp_source).name + ) + + if self.duplicate_gids: + msg = "Cannot collect duplicate gids in multiple chunks" + assert len(all_source_gids) == len(source_gids), msg + out_i0 = 0 + for source_file in self._source_files: + if source_file == fp_source: + break + out_i0 += len(self._file_gid_map[source_file]) + out_i1 = out_i0 + len(self._file_gid_map[fp_source]) + out_slice = slice(out_i0, out_i1) + source_slice = slice(None) + + elif all(sorted(source_gids) == source_gids): + source_i0 = np.where(all_source_gids == np.min(source_gids))[0][0] + source_i1 = np.where(all_source_gids == np.max(source_gids))[0][0] + source_slice = slice(source_i0, source_i1 + 1) + + elif all(source_gids == all_source_gids): + source_slice = slice(None) + + else: + source_slice = np.isin(all_source_gids, source_gids) + msg = ( + "source_gids is not in ascending order or equal to " + "all_source_gids. This can cause issues with the " + "collection ordering. Please check your data carefully." + ) + logger.warning(msg) + warn(msg, gapsCollectionWarning) + + return out_slice, source_slice, source_indexer + + def _collect(self): + """Simple & robust serial collection optimized for low memory usage.""" + logger.info("Collecting %s...", self._dataset_in) + with _OutputsWithAliases(self._h5_file, mode="a") as f_out: + + if self._pass_through: + with Resource(self._source_files[0]) as f_source: + f_out[self._dataset_out] = f_source[self._dataset_in] + + else: + for f_ind, file_path in enumerate(self._source_files, start=1): + with Resource(file_path) as f_source: + chunks = self._get_source_gid_chunks(f_source) + all_source_gids, source_gid_chunks = chunks + + for source_gids in source_gid_chunks: + self._collect_chunk( + all_source_gids, + source_gids, + f_out, + f_source, + file_path, + ) + + mem = psutil.virtual_memory() + logger.debug( + "Finished collecting %r from %d out of %d " + "files. Memory utilization is %.3f GB out " + "of %.3f GB total (%.1f%% used)", + self._dataset_in, + f_ind, + len(self._source_files), + mem.used / (1024.0**3), + mem.total / (1024.0**3), + 100 * mem.used / mem.total, + ) + +
+[docs] + @classmethod + def collect_dataset( + cls, + h5_file, + source_files, + gids, + dataset_in, + dataset_out=None, + memory_utilization_limit=0.7, + pass_through=False, + ): + """Collect a dataset from multiple source files into a single file. + + Parameters + ---------- + h5_file : path-like + Path to h5_file into which dataset is to be collected. + source_files : list + List of source filepaths. + gids : list + List of gids to be collected. + dataset_in : str + Name of dataset to collect. + dataset_out : str, optional + Name of dataset into which collected data is to be written. + If `None` the name of the output dataset is assumed to match + the dataset input name. By default, `None`. + memory_utilization_limit : float, optional + Memory utilization limit (fractional). This sets how many + sites will be collected at a time. By default, `0.7`. + pass_through : bool, optional + Flag to just pass through dataset from one of the source + files, assuming all of the source files have identical + copies of this dataset. By default, `False`. + """ + collector = cls( + h5_file, + source_files, + gids, + dataset_in, + dataset_out=dataset_out, + memory_utilization_limit=memory_utilization_limit, + pass_through=pass_through, + ) + collector._collect()
+
+ + + +
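For reference, a minimal sketch of collecting one dataset with the classmethod above, assuming a `gaps.collection` import path and hypothetical reV-style chunk files and dataset names:

from gaps.collection import DatasetCollector  # assumed import path

# Hypothetical chunked output files; each must carry a "meta" table with a
# "gid" column plus the dataset being collected.
source_files = ["./outputs/run_chunk0.h5", "./outputs/run_chunk1.h5"]
gids = list(range(200))  # full set of gids expected across all chunks

# The destination file is expected to already contain "meta" (and
# "time_index" for 2D data), e.g. written by Collector.combine_meta().
DatasetCollector.collect_dataset(
    "./outputs/run_collected.h5",
    source_files,
    gids,
    "cf_mean",                     # hypothetical 1D dataset name
    memory_utilization_limit=0.5,  # use at most half of system memory
)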
+[docs] +class Collector: + """Collector of multiple source files into a single output file.""" + + def __init__( + self, h5_file, collect_pattern, project_points, clobber=False + ): + """ + Parameters + ---------- + h5_file : path-like + Path to output HDF5 file into which data will be collected. + collect_pattern : str + Unix-style /filepath/pattern*.h5 representing a list of + input files to be collected into a single HDF5 file. + project_points : str | slice | list | pandas.DataFrame | None + Project points that correspond to the full collection of + points contained in the HDF5 files to be collected. `None` + if points list is to be ignored (i.e. collect all data in + the input HDF5 files without checking that all gids are + there). + clobber : bool, optional + Flag to purge output HDF5 file if it already exists. + By default, `False`. + """ + log_versions() + self.h5_out = Path(h5_file) + self.collect_pattern = collect_pattern + if clobber and self.h5_out.exists(): + warn( + f"{h5_file} already exists and is being replaced", + gapsCollectionWarning, + ) + self.h5_out.unlink() + + self._h5_files = find_h5_files( + self.collect_pattern, ignore=self.h5_out.name + ) + if project_points is not None: + logger.debug("Parsing project points...") + self._gids = parse_project_points(project_points) + else: + self._gids = parse_gids_from_files(self._h5_files) + + self.combine_meta() + +
+[docs] + def get_dataset_shape(self, dataset_name): + """Extract dataset shape from the first file in the collection list. + + Parameters + ---------- + dataset_name : str + Dataset to be collected whose shape is in question. + + Returns + ------- + shape : tuple + Dataset shape tuple. + """ + with Resource(self.h5_files[0]) as file_: + shape = file_.shapes[dataset_name] + + return shape
+ + + @property + def h5_files(self): + """list: List of paths to HDF5 files to be combined.""" + return self._h5_files + + @property + def gids(self): + """list: List of gids corresponding to all sites to be combined.""" + return self._gids + +
+[docs] + def combine_meta(self): + """Combine meta data from input HDF5 files and write to out file.""" + + logger.info( + "Combining meta data from list of %d source files: %s", + len(self.h5_files), + self.h5_files, + ) + + with _OutputsWithAliases(self.h5_out, mode="a") as f_out: + if "meta" in f_out.datasets: + self._check_meta(f_out.meta) + else: + with Resource(self.h5_files[0]) as f_in: + global_attrs = f_in.get_attrs() + meta_attrs = f_in.get_attrs("meta") + + for key, value in global_attrs.items(): + f_out.h5.attrs[key] = value + + meta = [parse_meta(h5_file) for h5_file in self.h5_files] + + meta = pd.concat(meta, axis=0) + meta = self._check_meta(meta) + logger.info("Writing meta data with shape %s", meta.shape) + f_out.set_meta("meta", meta, attrs=meta_attrs) + + logger.debug("\t- 'meta' collected")
+ + +
+[docs] + def combine_time_index(self): + """Combine `time_index` from input HDF5 files and write to out file. + + If `time_index` is not given in the input HDF5 files, the + `time_index` in the output file is set to `None`. + """ + # If we ever become Python 3.10+ exclusive, we can use parenthesis here + # fmt: off + with _OutputsWithAliases(self.h5_files[0], mode="r") as f_source, \ + _OutputsWithAliases(self.h5_out, mode="a") as f_out: + time_index_datasets = [ + d for d in list(f_source) if d.startswith("time_index") + ] + time_index = None + for name in time_index_datasets: + time_index = f_source.get_time_index(name, slice(None)) + attrs = f_source.get_attrs(name) + f_out.set_time_index(name, time_index, attrs=attrs)
+ + + def _check_meta(self, meta): + """Validate meta. + + In particular, this method checks the combined meta against + `self._gids` to make sure all sites are present in + `self.h5_files`. + + Parameters + ---------- + meta : :class:`pd.DataFrame` + DataFrame of combined meta from all files in + `self.h5_files`. Duplicate GIDs are dropped and a warning is + raised. + """ + meta_gids = meta["gid"].values + gids = np.array(self.gids) + missing = gids[~np.in1d(gids, meta_gids)] + if any(missing): + # TODO: Write missing gids to disk to allow for automated re-run + msg = f"gids: {missing} are missing" + raise gapsRuntimeError(msg) + + if len(set(meta_gids)) != len(meta): + msg = ( + f"Meta of length {len(meta)} has {len(set(meta_gids))} " + f"unique gids! There are duplicate gids in the source " + f"file list: {self.h5_files!r}" + ) + warn(msg, gapsCollectionWarning) + + if not all(sorted(meta["gid"].values) == meta["gid"].values): + msg = ( + "Collection was run with non-ordered meta data GIDs! " + "This can cause issues with the collection ordering. " + "Please check your data carefully." + ) + warn(msg, gapsCollectionWarning) + + meta = meta.reset_index(drop=True) + + return meta + +
+[docs] + def purge_chunks(self): + """Remove chunked files from a directory. + + Warns + ----- + gapsCollectionWarning + If some datasets have not been collected. + + Warnings + -------- + This function WILL NOT delete files if any datasets were not + collected. + """ + + with Resource(self.h5_out) as out: + collected_datasets = out.datasets + + with Resource(self.h5_files[0]) as out: + source_datasets = out.datasets + + missing = [d for d in source_datasets if d not in collected_datasets] + + if any(missing): + msg = ( + f"Not purging chunked output files. These datasets " + f"have not been collected: {missing}" + ) + warn(msg, gapsCollectionWarning) + return + + for fpath in self.h5_files: + fpath.unlink() + + logger.info("Purged chunk files from %s", self.collect_pattern)
+ + +
+[docs] + def move_chunks(self, sub_dir="chunk_files"): + """Move chunked files from a directory to a sub-directory. + + Parameters + ---------- + sub_dir : path-like, optional + Sub directory name to move chunks to. By default, + `"chunk_files"`. + """ + for fpath in self.h5_files: + new_dir = fpath.parent / sub_dir + new_dir.mkdir(parents=True, exist_ok=True) + shutil.move(fpath, new_dir / fpath.name) + + logger.info( + "Moved chunk files from %s to sub_dir: %s", + self.collect_pattern, + sub_dir, + )
+ + +
+[docs] + def collect( + self, + dataset_in, + dataset_out=None, + memory_utilization_limit=0.7, + pass_through=False, + ): + """Collect a dataset from h5_dir to h5_file + + Parameters + ---------- + dataset_in : str + Name of dataset to collect. If source shape is 2D, + time index will be collected as well. + dataset_out : str + Name of dataset into which collected data is to be written. + If `None` the name of the output dataset is assumed to match + the dataset input name. By default, `None`. + memory_utilization_limit : float + Memory utilization limit (fractional). This sets how many + sites will be collected at a time. By default, `0.7`. + pass_through : bool + Flag to just pass through dataset from one of the source + files, assuming all of the source files have identical + copies of this dataset. By default, `False`. + + See Also + -------- + Collector.add_dataset + Collect a dataset into an existing HDF5 file. + """ + + logger.info( + "Collecting dataset %r from files based on pattern %r to output: " + "%s", + dataset_in, + self.collect_pattern, + self.h5_out, + ) + start_time = time.time() + dataset_shape = self.get_dataset_shape(dataset_in) + if len(dataset_shape) > 1: + self.combine_time_index() + logger.debug("\t- 'time_index' collected") + + DatasetCollector.collect_dataset( + self.h5_out, + self.h5_files, + self.gids, + dataset_in, + dataset_out=dataset_out, + memory_utilization_limit=memory_utilization_limit, + pass_through=pass_through, + ) + + logger.debug("\t- Collection of %r complete", dataset_in) + + elapsed_time = (time.time() - start_time) / 60 + logger.info("Collection complete") + logger.debug("\t- Collection took %.4f minutes", elapsed_time)
+ + +
+[docs] + @classmethod + def add_dataset( + cls, + h5_file, + collect_pattern, + dataset_in, + dataset_out=None, + memory_utilization_limit=0.7, + pass_through=False, + ): + """Collect and add a dataset to a single HDF5 file. + + Parameters + ---------- + h5_file : path-like + Path to output HDF5 file into which data will be collected. + Note that this file must already exist and have a valid + `meta`. + collect_pattern : str + Unix-style /filepath/pattern*.h5 representing a list of + input files to be collected into a single HDF5 file. + dataset_in : str + Name of dataset to collect. If source shape is 2D, + time index will be collected as well. + dataset_out : str + Name of dataset into which collected data is to be written. + If `None` the name of the output dataset is assumed to match + the dataset input name. By default, `None`. + memory_utilization_limit : float + Memory utilization limit (fractional). This sets how many + sites will be collected at a time. By default, `0.7`. + pass_through : bool + Flag to just pass through dataset from one of the source + files, assuming all of the source files have identical + copies of this dataset. By default, `False`. + + See Also + -------- + Collector.collect + Collect a dataset into a file that does not yet exist. + """ + logger.info( + "Collecting dataset %r from files based on pattern %r and " + "adding to: %s", + dataset_in, + collect_pattern, + h5_file, + ) + with Resource(h5_file) as res: + points = res.meta + + collector = cls(h5_file, collect_pattern, points, clobber=False) + collector.collect( + dataset_in, + dataset_out=dataset_out, + memory_utilization_limit=memory_utilization_limit, + pass_through=pass_through, + )
+
+ + + +
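A sketch of the typical end-to-end collection workflow with this class, again using hypothetical file, project-points, and dataset names:

from gaps.collection import Collector  # assumed import path

# Hypothetical glob pattern and project points file for a chunked model run.
collector = Collector(
    "./outputs/run_collected.h5",
    "./outputs/run_chunk*.h5",
    "./project_points.csv",
    clobber=True,
)
collector.collect("cf_profile")  # 2D dataset; "time_index" is collected first
collector.collect("cf_mean")     # 1D dataset

# A forgotten dataset can still be added to the existing collected file:
Collector.add_dataset(
    "./outputs/run_collected.h5", "./outputs/run_chunk*.h5", "lcoe_fcr"
)

collector.move_chunks()  # finally, stash chunk files in ./outputs/chunk_files/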
+[docs] +def parse_project_points(project_points): + """Extract resource gids from project points. + + Parameters + ---------- + project_points : str | slice | list | pandas.DataFrame + Reference to resource points that were processed and need + collecting. + + Returns + ------- + gids : list + List of resource gids that are to be collected. + """ + try: + project_points = pd.read_csv(project_points) + except (TypeError, ValueError): + pass + + points = project_points_from_container_or_slice(project_points) + if not sorted(points) == points: + msg = ( + "Project points contain non-ordered meta data GIDs! This " + "can cause issues with the collection ordering. Please " + "check your data carefully." + ) + logger.warning(msg) + warn(msg, gapsCollectionWarning) + return points
+ + + +
+[docs] +def find_h5_files(collect_pattern, ignore=None): + """Search pattern for existing HDF5 files. + + Parameters + ---------- + collect_pattern : str + Unix-style /filepath/pattern*.h5 representing a list of + input files to be collected into a single HDF5 file. + ignore : str | container | None, optional + File name(s) to ignore. By default, `None`. + + Returns + ------- + list + List of sorted filepaths. + + Raises + ------ + gapsRuntimeError + If not all source files end in ".h5" (i.e. are not of type HDF5). + """ + ignore = ignore or [] + if isinstance(ignore, str): + ignore = [ignore] + + h5_files = [] + logger.debug("Looking for source files based on %s", collect_pattern) + + collect_pattern = Path(collect_pattern) + h5_files = [ + fp + for fp in collect_pattern.parent.glob(collect_pattern.name) + if not fp.name in ignore + ] + h5_files = sorted(h5_files, key=lambda fp: fp.name) + + if not all(fp.name.endswith(".h5") for fp in h5_files): + msg = ( + f"Source pattern resulted in non-h5 files that cannot " + f"be collected: {h5_files}, pattern: {collect_pattern}" + ) + raise gapsRuntimeError(msg) + + return h5_files
+ + + +
+[docs] +def parse_gids_from_files(h5_files): + """Extract a gid list from a list of h5_files. + + Parameters + ---------- + h5_files : list + List of h5 files to be collected. + + Returns + ------- + gids : list + List of resource gids to be collected. + """ + logger.debug("Parsing gid list from files...") + meta = [parse_meta(h5_file) for h5_file in h5_files] + meta = pd.concat(meta, axis=0) + gids = list(meta["gid"].values.astype(int).tolist()) + + if len(gids) > len(set(gids)): + msg = "Duplicate GIDs were found in source files!" + warn(msg, gapsCollectionWarning) + + if not sorted(gids) == gids: + msg = ( + "Collection was run without project points file and with " + "non-ordered meta data GIDs! This can cause issues with " + "the collection ordering. Please check your data " + "carefully." + ) + warn(msg, gapsCollectionWarning) + + return gids
+ + + +
+[docs] +def parse_meta(h5_file): + """Extract and convert meta data from a rec.array to a DataFrame. + + Parameters + ---------- + h5_file : path-like + Path to HDF5 file from which meta is to be parsed. + + Returns + ------- + meta : :class:`pd.DataFrame` + Portion of meta data corresponding to sites in `h5_file`. + """ + with Resource(h5_file) as res: + meta = res.meta + + return meta
+ + + +def _get_site_mem_req(shape, dtype, num_prototype_sites=100): + """Calculate memory requirement to collect a dataset. + + Parameters + ---------- + shape : tuple + Shape of dataset to be collected (n_time, n_sites). + dtype : np.dtype + Numpy dtype of dataset (disk dtype). + num_prototype_sites : int, optional + Number of sites to prototype the memory req with. By default, + `100`. + + Returns + ------- + site_mem : float + Memory requirement in bytes to collect a dataset with shape and + dtype. + """ + num_prototype_time_steps = shape[0] if len(shape) > 1 else 1 + shape = (num_prototype_time_steps, num_prototype_sites) + site_mem = sys.getsizeof(np.ones(shape, dtype=dtype)) + return site_mem / num_prototype_sites + + +def _get_gid_slice(gids_out, source_gids, fn_source): + """Return site slice that the chunked set of source gids belongs to. + + Parameters + ---------- + gids_out : list + List of resource GIDS in the final output meta data f_out. + source_gids : list + List of resource GIDS in one chunk of source data. + fn_source : str + Source filename for warning printout. + + Returns + ------- + site_slice : slice | np.ndarray + Slice in the final output file to write data to from source + gids. If gids in destination file are non-sequential, a boolean + array of indexes is returned and a warning is thrown. + """ + gid_in_source = np.isin(gids_out, source_gids) + locs = np.where(gid_in_source)[0] + if len(locs) == 0: + msg = ( + f"DatasetCollector could not locate source gids in output " + f"gids.\n\tSource gids: {source_gids}" + f"\n\tOutput gids: {gids_out}" + ) + raise gapsRuntimeError(msg) + + sequential_locs = np.arange(locs.min(), locs.max() + 1) + + if (locs != sequential_locs).any(): + msg = ( + f"GID indices for source file {fn_source!r} are not " + f"sequential in destination file!" + ) + warn(msg, gapsCollectionWarning) + return gid_in_source + + return slice(locs.min(), locs.max() + 1) +
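The chunking above reduces to a bytes-per-site estimate scaled by the number of source gids; a standalone sketch with made-up numbers (hourly float32 profiles, 50,000 sites in one source file) shows how the chunk count is chosen:

import sys

import numpy as np
import psutil

# Illustrative numbers: hourly profiles (8760 steps) stored as float32.
n_time, n_prototype = 8760, 100
site_mem = sys.getsizeof(np.ones((n_time, n_prototype), dtype="float32"))
site_mem /= n_prototype                          # bytes needed per site

mem_avail = 0.7 * psutil.virtual_memory().total  # default 70% budget
n_source_gids = 50_000                           # sites in one source file

# The collector keeps increasing the chunk count until one chunk of source
# gids fits inside the memory budget; otherwise everything is read at once.
num_chunks = 1
while (n_source_gids / num_chunks) * site_mem > mem_avail:
    num_chunks += 1

print(f"~{n_source_gids * site_mem / 1e9:.1f} GB requested "
      f"-> collect in {num_chunks} chunk(s)")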
+
\ No newline at end of file
diff --git a/_modules/gaps/config.html b/_modules/gaps/config.html
new file mode 100644
index 00000000..9484d002
--- /dev/null
+++ b/_modules/gaps/config.html
@@ -0,0 +1,799 @@

Source code for gaps.config

+# -*- coding: utf-8 -*-
+"""
+GAPs config functions and classes.
+"""
+import collections
+from pathlib import Path
+from abc import ABC, abstractmethod
+
+import json
+import yaml
+import toml
+import pyjson5
+
+from gaps.log import init_logger
+from gaps.utilities import CaseInsensitiveEnum, resolve_path
+from gaps.exceptions import gapsValueError
+
+_CONFIG_HANDLER_REGISTRY = {}
+
+
+# pylint: disable=too-few-public-methods
+class _JSON5Formatter:
+    """Format input JSON5 data with indentation."""
+
+    def __init__(self, data):
+        self.data = data
+
+    def _format_as_json(self):
+        """Format the input data as a JSON string with indentation."""
+        return json.dumps(self.data, indent=4)
+
+
+
+[docs] +class Handler(ABC): + """ABC for configuration file handler.""" + + def __init_subclass__(cls): + super().__init_subclass__() + if isinstance(cls.FILE_EXTENSION, str): + _CONFIG_HANDLER_REGISTRY[cls.FILE_EXTENSION] = cls + else: + for file_extension in cls.FILE_EXTENSION: + _CONFIG_HANDLER_REGISTRY[file_extension] = cls + +
+[docs] + @classmethod + def load(cls, file_name): + """Load the file contents.""" + with open(file_name, "r") as config_file: + config_str = config_file.read() + return cls.loads(config_str)
+ + +
+[docs] + @classmethod + def write(cls, file_name, data): + """Write the data to a file.""" + with open(file_name, "w") as config_file: + cls.dump(data, config_file)
+ + +
+[docs] + @classmethod + @abstractmethod + def dump(cls, config, stream): + """Write the config to a stream (file)."""
+ + +
+[docs] + @classmethod + @abstractmethod + def dumps(cls, config): + """Convert the config to a string."""
+ + +
+[docs] + @classmethod + @abstractmethod + def loads(cls, config_str): + """Parse the string into a config dictionary."""
+ + + # pylint: disable=invalid-name + @property + @abstractmethod + def FILE_EXTENSION(self): + """str: Enum name to use"""
+ + + +
+[docs] +class JSONHandler(Handler): + """JSON config file handler.""" + + FILE_EXTENSION = "json" + +
+[docs] + @classmethod + def dump(cls, config, stream): + """Write the config to a stream (JSON file).""" + return json.dump(config, stream, indent=4)
+ + +
+[docs] + @classmethod + def dumps(cls, config): + """Convert the config to a JSON string.""" + return json.dumps(config, indent=4)
+ + +
+[docs] + @classmethod + def loads(cls, config_str): + """Parse the JSON string into a config dictionary.""" + return json.loads(config_str)
+
+ + + +# pylint: disable=no-member +
+[docs] +class JSON5Handler(Handler): + """JSON5 config file handler.""" + + FILE_EXTENSION = "json5" + +
+[docs] + @classmethod + def dump(cls, config, stream): + """Write the config to a stream (JSON5 file).""" + return pyjson5.encode_io( + _JSON5Formatter(config), + stream, + supply_bytes=False, + tojson="_format_as_json", + )
+ + +
+[docs] + @classmethod + def dumps(cls, config): + """Convert the config to a JSON5 string.""" + return pyjson5.encode( + _JSON5Formatter(config), + tojson="_format_as_json", + )
+ + +
+[docs] + @classmethod + def loads(cls, config_str): + """Parse the JSON5 string into a config dictionary.""" + return pyjson5.decode(config_str, maxdepth=-1)
+
+ + + +
+[docs] +class YAMLHandler(Handler): + """YAML config file handler.""" + + FILE_EXTENSION = "yaml", "yml" + +
+[docs] + @classmethod + def dump(cls, config, stream): + """Write the config to a stream (YAML file).""" + return yaml.safe_dump(config, stream, indent=2, sort_keys=False)
+ + +
+[docs] + @classmethod + def dumps(cls, config): + """Convert the config to a YAML string.""" + return yaml.safe_dump(config, indent=2, sort_keys=False)
+ + +
+[docs] + @classmethod + def loads(cls, config_str): + """Parse the YAML string into a config dictionary.""" + return yaml.safe_load(config_str)
+
+ + + +
+[docs] +class TOMLHandler(Handler): + """TOML config file handler.""" + + FILE_EXTENSION = "toml" + +
+[docs] + @classmethod + def dump(cls, config, stream): + """Write the config to a stream (TOML file).""" + return toml.dump(config, stream)
+ + +
+[docs] + @classmethod + def dumps(cls, config): + """Convert the config to a TOML string.""" + return toml.dumps(config)
+ + +
+[docs] + @classmethod + def loads(cls, config_str): + """Parse the TOML string into a config dictionary.""" + return toml.loads(config_str)
+
+ + + +class _ConfigType(CaseInsensitiveEnum): + """A base config type enum class only meant to be initialized once.""" + + @classmethod + def _new_post_hook(cls, obj, value): + """Hook for post-processing after __new__; adds config methods""" + obj.dump = _CONFIG_HANDLER_REGISTRY[value].dump + obj.dumps = _CONFIG_HANDLER_REGISTRY[value].dumps + obj.load = _CONFIG_HANDLER_REGISTRY[value].load + obj.loads = _CONFIG_HANDLER_REGISTRY[value].loads + obj.write = _CONFIG_HANDLER_REGISTRY[value].write + return obj + + +# pylint: disable=unexpected-keyword-arg,too-many-function-args +ConfigType = _ConfigType( + "ConfigType", + { + config_type.upper(): config_type + for config_type in _CONFIG_HANDLER_REGISTRY + }, +) +"""An enumeration of the available gaps config types. """ + + +
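A short sketch of how these handlers are used through the `ConfigType` enum members; the config keys shown are purely illustrative:

from gaps.config import ConfigType

config = {
    "project_points": [0, 1, 2],
    "execution_control": {"option": "local"},
}

# Every member of ConfigType carries the matching handler's methods, so the
# same dictionary can be serialized to any supported format.
as_json = ConfigType.JSON.dumps(config)
as_yaml = ConfigType.YAML.dumps(config)
as_toml = ConfigType.TOML.dumps(config)

# Round-trip back into a dictionary with the same member:
assert ConfigType.YAML.loads(as_yaml) == config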
+[docs] +def config_as_str_for_docstring( + config, config_type=ConfigType.JSON, num_spaces=12 +): + """Convert a config into a string to be used within a docstring. + + In particular, the config is serialized and extra whitespace is + added after each newline. + + Parameters + ---------- + config : dict + Dictionary containing the configuration to be converted into + docstring format. + config_type : :class:`ConfigType`, optional + A :class:`ConfigType` enumeration value specifying what type + of config file to generate. By default, :attr:`ConfigType.JSON`. + num_spaces : int, optional + Number of spaces to add after a newline. By default, `12`. + + Returns + ------- + str + A string version of the input config, conforming to the + specified config type. + """ + newline_str = "".join(["\n"] + [" "] * num_spaces) + return config_type.dumps(config).replace("\n", newline_str)
+ + + +
+[docs] +def load_config(config_filepath, resolve_paths=True): + """Load a config file + + Parameters + ---------- + config_filepath : path-like + Path to config file. + resolve_paths : bool, optional + Option to (recursively) resolve file-paths in the dictionary + w.r.t the config file directory. + By default, ``True``. + + Returns + ------- + dict + Dictionary containing configuration parameters. + + Raises + ------ + gapsValueError + If input `config_filepath` has no file ending. + """ + # TODO: maybe also have a "required keys" argument + config_filepath = Path(config_filepath).expanduser().resolve() + if "." not in config_filepath.name: + msg = ( + f"Configuration file must have a file-ending. Got: " + f"{config_filepath.name}" + ) + raise gapsValueError(msg) + config_type = ConfigType(config_filepath.name.split(".")[-1]) + config = config_type.load(config_filepath) + if resolve_paths: + return resolve_all_paths(config, config_filepath.parent) + + return config
+ + + +
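A minimal usage sketch, assuming a hypothetical `./config_run.json5` file sitting next to the relative paths it references:

from gaps.config import load_config

# Relative paths inside the file are resolved against the config directory.
config = load_config("./config_run.json5")

# Skip path resolution if the raw strings are needed verbatim:
raw = load_config("./config_run.json5", resolve_paths=False)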
+[docs] +def resolve_all_paths(container, base_dir): + """Perform a deep string replacement and path resolve in `container`. + + Parameters + ---------- + container : dict | list + Container like a dictionary or list that may (or may not) + contain relative paths to resolve. + base_dir : path-like + Base path to directory from which to resolve path string + (typically current directory) + + Returns + ------- + container + Input container with updated strings. + """ + + if isinstance(container, str): + # `resolve_path` is safe to call on any string, + # even if it is not a path + container = resolve_path(container, Path(base_dir)) + + elif isinstance(container, collections.abc.Mapping): + container = { + key: resolve_all_paths(val, Path(base_dir)) + for key, val in container.items() + } + + elif isinstance(container, collections.abc.Sequence): + container = [ + resolve_all_paths(item, Path(base_dir)) for item in container + ] + + return container
+ + + +
+[docs] +def init_logging_from_config_file(config_file, background=False): + """Init logging, taking care of legacy rex-style kwargs. + + Parameters + ---------- + config_file : path-like + Path to a config file parsable as a dictionary which may or may + not contain a "logging" key. If key not found, no further action + is taken. If key is found, the value is expected to be a + dictionary of keyword-argument pairs + to :func:`gaps.log.init_logger`. rex-style keys ("log_level", + "log_file", "log_format") are allowed. The values of these + inputs are used to initialize a gaps logger. + background : bool, optional + Optional flag to specify background job initialization. If + ``True``, then stream output is disabled. By default, ``False``. + """ + config = load_config(config_file) + if "logging" not in config: + return + + kwargs = config["logging"] + kwarg_map = {"log_level": "level", "log_file": "file", "log_format": "fmt"} + for legacy_kwarg, new_kwarg in kwarg_map.items(): + if legacy_kwarg in kwargs: + kwargs[new_kwarg] = kwargs.pop(legacy_kwarg) + + if background: + kwargs["stream"] = False + + init_logger(**kwargs)
+ +
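A sketch of a config carrying a legacy rex-style `logging` block; the file names here are placeholders:

from gaps.config import ConfigType, init_logging_from_config_file

# Legacy rex-style keys ("log_level", "log_file", "log_format") are remapped
# to gaps.log.init_logger arguments before the logger is initialized.
config = {
    "logging": {"log_level": "DEBUG", "log_file": "./gaps_run.log"},
    # ... other, module-specific keys ...
}
ConfigType.JSON.write("./config_run.json", config)

init_logging_from_config_file("./config_run.json", background=False)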
+
\ No newline at end of file
diff --git a/_modules/gaps/exceptions.html b/_modules/gaps/exceptions.html
new file mode 100644
index 00000000..a5e8be85
--- /dev/null
+++ b/_modules/gaps/exceptions.html
@@ -0,0 +1,472 @@

Source code for gaps.exceptions

+# -*- coding: utf-8 -*-
+# pylint: disable=invalid-name
+"""Custom Exceptions and Errors for gaps. """
+import logging
+
+logger = logging.getLogger("gaps")
+
+
+
+[docs] +class gapsError(Exception): + """Generic gaps Error.""" + + def __init__(self, *args, **kwargs): + """Init exception and broadcast message to logger.""" + super().__init__(*args, **kwargs) + if args: + logger.error(str(args[0]), stacklevel=2)
+ + + +
+[docs] +class gapsConfigError(gapsError): + """gaps ConfigError."""
+ + + +
+[docs] +class gapsExecutionError(gapsError): + """gaps ExecutionError."""
+ + + +
+[docs] +class gapsIndexError(gapsError, IndexError): + """gaps IndexError."""
+ + + +
+[docs] +class gapsIOError(gapsError, IOError): + """gaps IOError."""
+ + + +
+[docs] +class gapsKeyError(gapsError, KeyError): + """gaps KeyError."""
+ + + +
+[docs] +class gapsRuntimeError(gapsError, RuntimeError): + """gaps RuntimeError."""
+ + + +
+[docs] +class gapsTypeError(gapsError, TypeError): + """gaps TypeError."""
+ + + +
+[docs] +class gapsValueError(gapsError, ValueError): + """gaps ValueError."""
+ + + +
+[docs] +class gapsHPCError(gapsError): + """gaps HPCError."""
+ +
+ +
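Because each gaps exception also inherits the corresponding builtin, callers can catch either type, and raising logs the message through the "gaps" logger automatically. A small sketch (the config key name is made up):

import logging

from gaps.exceptions import gapsError, gapsKeyError

logging.basicConfig(level=logging.ERROR)

try:
    raise gapsKeyError("'execution_control' key missing from config")
except KeyError as err:                # caught as a plain KeyError...
    assert isinstance(err, gapsError)  # ...but it is still a gapsError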
+
\ No newline at end of file
diff --git a/_modules/gaps/hpc.html b/_modules/gaps/hpc.html
new file mode 100644
index 00000000..33c3aa17
--- /dev/null
+++ b/_modules/gaps/hpc.html
@@ -0,0 +1,1206 @@

Source code for gaps.hpc

+# -*- coding: utf-8 -*-
+# pylint: disable=invalid-name,arguments-differ,too-many-arguments
+"""
+HPC Execution utilities.
+"""
+import re
+import shlex
+import logging
+import getpass
+import subprocess
+from math import floor
+from pathlib import Path
+from warnings import warn
+from collections import namedtuple
+from abc import ABC, abstractmethod
+
+
+from gaps.exceptions import gapsExecutionError, gapsHPCError, gapsValueError
+from gaps.warnings import gapsHPCWarning
+
+
+logger = logging.getLogger(__name__)
+
+
+Q_COLUMNS = namedtuple("Q_COLUMNS", ["NAME", "ID", "STATUS"])
+COMMANDS = namedtuple("COMMANDS", ["SUBMIT", "CANCEL"])
+DEFAULT_STDOUT_PATH = "./stdout"
+"""Default directory for .stdout and .stderr files."""
+
+
+
+[docs] +class HpcJobManager(ABC): + """Abstract HPC job manager framework""" + + # get username as class attribute. + USER = getpass.getuser() + + # set a max job name length, will raise error if too long. + MAX_NAME_LEN = 100 + + SHELL_FILENAME_FMT = "{}.sh" + + def __init__(self, user=None, queue_dict=None): + """ + Parameters + ---------- + user : str | None, optional + HPC username. `None` will get your username using + :func:`getpass.getuser`. By default, `None`. + queue_dict : dict | None, optional + Parsed HPC queue dictionary from :meth:`parse_queue_str`. + `None` will get the queue info from the hardware. + By default, `None`. + """ + + self._user = user or self.USER + if queue_dict is not None and not isinstance(queue_dict, dict): + msg = ( + f"HPC queue_dict arg must be None or Dict but received: " + f"{queue_dict}, {type(queue_dict)}" + ) + raise gapsHPCError(msg) + + self._queue = queue_dict + +
+[docs] + @classmethod + def parse_queue_str(cls, queue_str): + """Parse the hardware queue string into a nested dictionary. + + This function parses the queue output string into a dictionary + keyed by integer job ids with values as dictionaries of job + properties (queue printout columns). + + Parameters + ---------- + queue_str : str + HPC queue output string. Typically a space-delimited string + with line breaks. + + Returns + ------- + queue_dict : dict + HPC queue parsed into dictionary format keyed by integer job + ids with values as dictionaries of job properties (queue + printout columns). + """ + + queue_dict = {} + header, queue_rows = cls._split_queue(queue_str) + for row in queue_rows: + job = [k.strip(" ") for k in row.strip(" ").split(" ") if k != ""] + job_id = int(job[header.index(cls.COLUMN_HEADERS.ID)]) + queue_dict[job_id] = dict(zip(header, job)) + + return queue_dict
+ + + @property + def queue(self): + """dict: HPC queue keyed by job ids with values as job properties.""" + if self._queue is None: + queue_str = self.query_queue() + self._queue = self.parse_queue_str(queue_str) + + return self._queue + +
+[docs] + def reset_query_cache(self): + """Reset the query dict cache so that hardware is queried again.""" + self._queue = None
+ + +
+[docs] + def check_status_using_job_id(self, job_id): + """Check the status of a job using the HPC queue and job ID. + + Parameters + ---------- + job_id : int + Job integer ID number. + + Returns + ------- + status : str | None + Queue job status string or `None` if not found. + """ + job_id = int(job_id) + return self.queue.get(job_id, {}).get(self.COLUMN_HEADERS.STATUS)
+ + +
+[docs] + def check_status_using_job_name(self, job_name): + """Check the status of a job using the HPC queue and job name. + + Parameters + ---------- + job_name : str + Job name string. + + Returns + ------- + status : str | None + Queue job status string or `None` if not found. + """ + for attrs in self.queue.values(): + if attrs[self.COLUMN_HEADERS.NAME] == job_name: + return attrs[self.COLUMN_HEADERS.STATUS] + + return None
+ + +
+[docs] + def cancel(self, arg): + """Cancel a job. + + Parameters + ---------- + arg : int | list | str + Integer job id(s) to cancel. Can be a list of integer + job ids, 'all' to cancel all jobs, or a feature (-p short) + to cancel all jobs with a given feature + """ + self._validate_command_not_none("CANCEL") + + if isinstance(arg, (list, tuple)): + for job_id in arg: + self.cancel(job_id) + + elif str(arg).lower() == "all": + self.reset_query_cache() + for job_id in self.queue.keys(): + self.cancel(job_id) + + elif isinstance(arg, (int, str)): + cmd = f"{self.COMMANDS.CANCEL} {arg}" + cmd = shlex.split(cmd) + subprocess.call(cmd) + + else: + raise gapsExecutionError( + f"Could not cancel: {arg} with type {type(arg)}" + )
+ + +
+[docs]
+    def submit(
+        self,
+        name,
+        keep_sh=False,
+        **kwargs,
+    ):
+        """Submit a job on the HPC.
+
+        Parameters
+        ----------
+        name : str
+            HPC job name.
+        keep_sh : bool, optional
+            Option to keep the submission script on disk.
+            By default, `False`.
+        **kwargs
+            Extra keyword-argument pairs to be passed to
+            :meth:`make_script_str`.
+
+        Returns
+        -------
+        out : str
+            Standard output from submission. If submitted successfully,
+            this is the Job ID.
+        err : str
+            Standard error. This is an empty string if the job was
+            submitted successfully.
+        """
+        self._validate_command_not_none("SUBMIT")
+        self._validate_name_length(name)
+        if self._job_is_running(name):
+            logger.info(
+                "Not submitting job %r because it is already in the "
+                "queue or has been recently submitted",
+                name,
+            )
+            return None, "already_running"
+
+        self._setup_submission(name, **kwargs)
+
+        out, err = submit(
+            f"{self.COMMANDS.SUBMIT} {self.SHELL_FILENAME_FMT.format(name)}"
+        )
+        out = self._teardown_submission(name, out, err, keep_sh=keep_sh)
+        return out, err
+
+    def _validate_command_not_none(self, command):
+        """Validate that a command is not `None`."""
+        if getattr(self.COMMANDS, command) is None:
+            raise NotImplementedError(
+                f"{command!r} command has not been defined for class "
+                f"{self.__class__.__name__!r}"
+            )
+
+    def _validate_name_length(self, name):
+        """Validate that the name does not exceed max length."""
+        if len(name) > self.MAX_NAME_LEN:
+            raise gapsValueError(
+                f"Cannot submit job with name longer than "
+                f"{self.MAX_NAME_LEN} chars: {name!r}"
+            )
+
+    def _setup_submission(self, name, **kwargs):
+        """Set up the submission file and directories."""
+        stdout_dir = Path(kwargs.get("stdout_path", DEFAULT_STDOUT_PATH))
+        stdout_dir.mkdir(parents=True, exist_ok=True)
+
+        script = self.make_script_str(name, **kwargs)
+
+        make_sh(self.SHELL_FILENAME_FMT.format(name), script)
+
+    def _teardown_submission(self, name, out, err, keep_sh=False):
+        """Remove submission file and mark job as submitted."""
+        if not keep_sh:
+            Path(self.SHELL_FILENAME_FMT.format(name)).unlink()
+
+        if err:
+            warn(
+                f"Received an error or warning during submission: {err}",
+                gapsHPCWarning,
+            )
+        else:
+            out = self._mark_job_as_submitted(name, out)
+
+        return out
+
+    def _mark_job_as_submitted(self, name, out):
+        """Mark job as submitted in the queue."""
+        job_id = int(_job_id_or_out(out).split(" ")[-1])
+        out = str(job_id)
+        logger.debug("Job %r with id #%s submitted successfully", name, job_id)
+        self._queue[job_id] = {
+            self.COLUMN_HEADERS.ID: job_id,
+            self.COLUMN_HEADERS.NAME: name,
+            self.COLUMN_HEADERS.STATUS: self.Q_SUBMITTED_STATUS,
+        }
+        return out
+
+    @abstractmethod
+    def _job_is_running(self, name):
+        """Check whether the job is submitted/running."""
+        raise NotImplementedError
+
+    @classmethod
+    @abstractmethod
+    def _split_queue(cls, queue_str):
+        """Split queue string into a header and a list of lines."""
+        raise NotImplementedError
+
+[docs] + @abstractmethod + def query_queue(self): + """Run the HPC queue command and return the raw stdout string. + + Returns + ------- + stdout : list + HPC queue output string that can be split into a list on + line breaks. + """ + raise NotImplementedError
+ + +
+[docs] + @abstractmethod + def make_script_str(self, name, **kwargs): + """Generate the submission script.""" + raise NotImplementedError
+ + + @property + @abstractmethod + def COLUMN_HEADERS(self): + """`namedtuple`: Column header names.""" + raise NotImplementedError + + @property + @abstractmethod + def COMMANDS(self): + """`namedtuple`: Command names.""" + raise NotImplementedError + + @property + @abstractmethod + def Q_SUBMITTED_STATUS(self): + """str: String representing the submitted status for this manager.""" + raise NotImplementedError
+ + + +
+[docs] +class PBS(HpcJobManager): + """Subclass for PBS subprocess jobs.""" + + COLUMN_HEADERS = Q_COLUMNS(NAME="Name", ID="Job id", STATUS="S") + + # String representing the submitted status for this manager + Q_SUBMITTED_STATUS = "Q" + + COMMANDS = COMMANDS(SUBMIT="qsub", CANCEL="qdel") # cspell:disable-line + +
+[docs] + def query_queue(self): + """Run the PBS qstat command and return the raw stdout string. + + Returns + ------- + stdout : str + qstat output string. Can be split on line breaks to get + list. + """ + stdout, _ = submit(f"qstat -u {self._user}") + return _skip_q_rows(stdout, skip_rows=(0, 1))
+
+    @classmethod
+    def _split_queue(cls, queue_str):
+        """Split queue into a header and a list of lines."""
+        header = ("Job id", "Name", "User", "Time Use", "S", "Queue")
+        return header, queue_str.split("\n")
+
+    def _job_is_running(self, name):
+        """Check whether the job is submitted/running."""
+        return self.check_status_using_job_name(name) in {"Q", "R"}
+
+[docs] + def make_script_str( + self, + name, + cmd, + allocation, + walltime, + qos=None, + memory=None, + queue=None, + feature=None, + stdout_path=DEFAULT_STDOUT_PATH, + conda_env=None, + sh_script=None, + ): + """Generate the PBS submission script. + + Parameters + ---------- + name : str + PBS job name. + cmd : str + Command to be submitted in PBS shell script. Example: + 'python -m reV.generation.cli_gen' + allocation : str + HPC allocation account. Example: 'rev'. + walltime : int | float + Node walltime request in hours. Example: 4. + qos : str, optional + Quality of service string for job. By default, `None`. + memory : int , optional + Node memory request in GB. By default, `None`. + queue : str + HPC queue to submit job to. Examples include: 'debug', + 'short', 'batch', 'batch-h', 'long', etc. By default, + `None`, which uses `test_queue`. + feature : str, optional + PBS feature request (-l {feature}). Example: + 'feature=24core'. *Do not use this input for QOS. Use the + ``qos`` arg instead.* By default, `None`. + stdout_path : str, optional + Path to print .stdout and .stderr files. + By default, :attr:`DEFAULT_STDOUT_PATH`. + conda_env : str, optional + Conda environment to activate. By default, `None`. + sh_script : str, optional + Script to run before executing command. By default, `None`. + + Returns + ------- + str + PBS script to submit. + """ + features = [ + str(feature).replace(" ", "") + if feature and "qos" not in feature + else "", + f"walltime={format_walltime(walltime)}" if walltime else "", + f"mem={memory}gb" if memory else "", + ] + if qos: + features += [f"qos={qos}"] + features = ",".join(filter(None, features)) + script_args = [ + "#!/bin/bash", + f"#PBS -N {name} # job name", + f"#PBS -A {allocation} # allocation account", + f"#PBS -q {queue} # queue" if queue else "", + # cspell:disable-next-line + f"#PBS -o {stdout_path}/{name}_$PBS_JOBID.o", + # cspell:disable-next-line + f"#PBS -e {stdout_path}/{name}_$PBS_JOBID.e", + f"#PBS -l {features}" if features else "", + format_env(conda_env), + # cspell:disable-next-line + "echo Running on: $HOSTNAME, Machine Type: $MACHTYPE", + "echo Running python in directory `which python`", + sh_script, + cmd, + ] + return "\n".join(filter(None, script_args))
+
+ + + +
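A sketch of generating (but not submitting) a PBS script with this class; the allocation name and command are placeholders:

from gaps.hpc import PBS

pbs = PBS()  # qstat is only queried when the queue is first inspected

script = pbs.make_script_str(
    name="run0",
    cmd="python -m my_model.cli run -c ./config_run.json",  # placeholder
    allocation="myalloc",                                    # placeholder
    walltime=4,
    memory=90,
    queue="batch-h",
)
print(script)  # one "#PBS ..." directive per line, followed by the command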
+[docs] +class SLURM(HpcJobManager): + """Subclass for SLURM subprocess jobs.""" + + # cspell:disable-next-line + COLUMN_HEADERS = Q_COLUMNS(NAME="NAME", ID="JOBID", STATUS="ST") + + # String representing the submitted status for this manager + Q_SUBMITTED_STATUS = "PD" + + # cspell:disable-next-line + COMMANDS = COMMANDS(SUBMIT="sbatch", CANCEL="scancel") + +
+[docs] + def query_queue(self): + """Run the HPC queue command and return the raw stdout string. + + Returns + ------- + stdout : str + HPC queue output string. Can be split on line breaks to get + a list. + """ + cmd = ( + f'squeue -u {self._user} --format="%.15i %.30P ' + f'%.{self.MAX_NAME_LEN}j %.20u %.10t %.15M %.25R %q"' + ) + stdout, _ = submit(cmd) + return _skip_q_rows(stdout)
+
+    @classmethod
+    def _split_queue(cls, queue_str):
+        """Split queue into a header and a list of lines."""
+        queue_rows = queue_str.split("\n")
+        header = [
+            k.strip(" ")
+            for k in queue_rows[0].strip(" ").split(" ")
+            if k != ""
+        ]
+        return header, queue_rows[1:]
+
+    def _job_is_running(self, name):
+        """Check whether the job is submitted/running."""
+        return self.check_status_using_job_name(name) is not None
+
+[docs] + def make_script_str( + self, + name, + cmd, + allocation, + walltime, + qos="normal", + memory=None, + feature=None, + stdout_path=DEFAULT_STDOUT_PATH, + conda_env=None, + sh_script=None, + ): + """Generate the SLURM submission script. + + Parameters + ---------- + name : str + SLURM job name. + cmd : str + Command to be submitted in SLURM shell script. Example: + 'python -m reV.generation.cli_gen' + allocation : str + HPC allocation account. Example: 'rev'. + walltime : int | float + Node walltime request in hours. Example: 4. + qos : {"normal", "high"} + Quality of service specification for job. Jobs with "high" + priority will be charged at 2x the rate. By default, + ``"normal"``. + memory : int , optional + Node memory request in GB. By default, `None`. + feature : str, optional + Additional flags for SLURM job. Format is + "--partition=debug" or "--depend=[state:job_id]". + *Do not use this input to specify QOS. Use the ``qos`` input + instead.* By default, `None`. + stdout_path : str, optional + Path to print .stdout and .stderr files. + By default, :attr:`DEFAULT_STDOUT_PATH`. + conda_env : str, optional + Conda environment to activate. By default, `None`. + sh_script : str, optional + Script to run before executing command. By default, `None`. + + Returns + ------- + str + SLURM script to submit. + """ + walltime = format_walltime(walltime) + memory = memory * 1000 if memory else None + + script_args = [ + "#!/bin/bash", + f"#SBATCH --account={allocation}" if allocation else "", + f"#SBATCH --time={walltime}" if walltime else "", + f"#SBATCH --job-name={name} # job name", + "#SBATCH --nodes=1 # number of nodes", + f"#SBATCH --output={stdout_path}/{name}_%j.o", + f"#SBATCH --error={stdout_path}/{name}_%j.e", + f"#SBATCH --qos={qos}", + f"#SBATCH {feature} # extra feature" if feature else "", + f"#SBATCH --mem={memory} # node RAM in MB" if memory else "", + format_env(conda_env), + # cspell:disable-next-line + "echo Running on: $HOSTNAME, Machine Type: $MACHTYPE", + "echo Running python in directory `which python`", + sh_script, + cmd, + ] + return "\n".join(filter(None, script_args))
+
+ + + +
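A sketch of submitting and monitoring a job with this class on a machine with SLURM available; the allocation, environment, and command names are placeholders:

from gaps.hpc import SLURM

slurm = SLURM()  # squeue is queried lazily and cached on first use

out, err = slurm.submit(
    name="run0",
    cmd="python -m my_model.cli run -c ./config_run.json",  # placeholder
    allocation="myalloc",                                    # placeholder
    walltime=1.5,
    conda_env="my-env",                                      # placeholder
)
print(out, err)  # job id string and, ideally, an empty error string

# Poll (or cancel) using fresh queue information:
slurm.reset_query_cache()
print(slurm.check_status_using_job_name("run0"))  # e.g. "PD" or "R"
# slurm.cancel(int(out))  # or slurm.cancel("all")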
+[docs] +def make_sh(fname, script): + """Make a shell script (.sh file) to execute a subprocess. + + Parameters + ---------- + fname : str + Name of the .sh file to create. + script : str + Contents to be written into the .sh file. + """ + logger.debug( + "The shell script %(n)r contains the following:\n" + "~~~~~~~~~~ %(n)s ~~~~~~~~~~\n" + "%(s)s\n" + "~~~~~~~~~~ %(n)s ~~~~~~~~~~", + {"n": fname, "s": script}, + ) + with open(fname, "w+") as f: + f.write(script)
+ + + +def _subprocess_popen(cmd): + """Open a subprocess popen constructor and submit a command. + + Parameters + ---------- + cmd : str + Command to be submitted using python subprocess. + + Returns + ------- + stdout : str + Subprocess standard output. This is decoded from the subprocess + stdout with rstrip. + stderr : str + Subprocess standard error. This is decoded from the subprocess + stderr with rstrip. After decoding/rstrip, this will be empty if + the subprocess doesn't return an error. + """ + + cmd = shlex.split(cmd) + + # pylint: disable=consider-using-with + # use subprocess to submit command and get piped o/e + process = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + stdout, stderr = process.communicate() + stderr = stderr.decode("ascii").rstrip() + stdout = stdout.decode("ascii").rstrip() + + if process.returncode != 0: + raise OSError( + f"Subprocess submission failed with return code " + f"{process.returncode} and stderr:\n{stderr}" + ) + + return stdout, stderr + + +def _subprocess_run(cmd, background_stdout=False): + """Open a subprocess and submit a command. + + Parameters + ---------- + cmd : str + Command to be submitted using python subprocess. + background_stdout : bool + Flag to capture the stdout/stderr from the background process + in a nohup.out file. + """ + + nohup_cmd_fmt = ["nohup {}"] + if not background_stdout: + nohup_cmd_fmt += ["</dev/null >/dev/null 2>&1"] + nohup_cmd_fmt += ["&"] + + cmd = " ".join(nohup_cmd_fmt).format(cmd) + subprocess.run(cmd, shell=True, check=True) + + +
+[docs] +def submit(cmd, background=False, background_stdout=False): + """Open a subprocess and submit a command. + + Parameters + ---------- + cmd : str + Command to be submitted using python subprocess. + background : bool + Flag to submit subprocess in the background. stdout stderr will + be empty strings if this is True. + background_stdout : bool + Flag to capture the stdout/stderr from the background process + in a nohup.out file. + + Returns + ------- + stdout : str + Subprocess standard output. This is decoded from the subprocess + stdout with rstrip. + stderr : str + Subprocess standard error. This is decoded from the subprocess + stderr with rstrip. After decoding/rstrip, this will be empty if + the subprocess doesn't return an error. + """ + + if background: + _subprocess_run(cmd, background_stdout=background_stdout) + return "", "" + + stdout, stderr = _subprocess_popen(cmd) + return stdout, stderr
+ + + +
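A usage sketch for the helper above; the commands are placeholders and must exist on the machine:

import getpass

from gaps.hpc import submit

# Blocking call: returns decoded stdout/stderr and raises OSError on a
# non-zero exit code (no shell is involved, so pass a plain executable call).
stdout, stderr = submit(f"squeue -u {getpass.getuser()}")

# Fire-and-forget: wraps the command in nohup and returns ("", "").
submit("python run_long_job.py", background=True, background_stdout=True)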
+[docs]
+def format_walltime(hours=None):
+    """Get the HPC walltime string in format "HH:MM:SS".
+
+    Parameters
+    ----------
+    hours : float | int, optional
+        Requested number of job hours. By default, `None`, which is
+        returned unchanged (no walltime string is built).
+
+    Returns
+    -------
+    walltime : str | None
+        Walltime request in format "HH:MM:SS", or `None` if no hours
+        were requested.
+    """
+    if hours is not None:
+        m_str = f"{round(60 * (hours % 1)):02d}"
+        h_str = f"{floor(hours):02d}"
+        return f"{h_str}:{m_str}:00"
+
+    return hours
+ + + +
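A few worked examples of the rounding above:

from gaps.hpc import format_walltime

assert format_walltime(4) == "04:00:00"
assert format_walltime(4.5) == "04:30:00"
assert format_walltime(0.25) == "00:15:00"
assert format_walltime() is None  # no request -> nothing to format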
+[docs] +def format_env(conda_env=None): + """Get special sbatch request strings for SLURM conda environments. + + Parameters + ---------- + conda_env : str, optional + Conda environment to activate. By default, `None`, which returns + an empty string. + + Returns + ------- + env_str : str + SBATCH shell script source activate environment request string. + """ + + if conda_env is not None: + return ( + f"echo source activate {conda_env}\n" + f"source activate {conda_env}\n" + f"echo conda env activate complete!\n" + ) + + return ""
+ + + +def _skip_q_rows(queue_str, skip_rows=None): + """Remove rows from the queue_str that are to be skipped. + + Parameters + ---------- + queue_str : str + HPC queue output string. Can be split on line breaks to get a + list. + skip_rows : list | None, optional + Optional row index values to skip. + + Returns + ------- + queue_str : str + HPC queue output string. Can be split on line breaks to get a + list. + """ + if skip_rows is None: + return queue_str + + queue_str = [ + row + for i, row in enumerate(queue_str.split("\n")) + if i not in skip_rows + ] + + return "\n".join(queue_str) + + +def _job_id_or_out(input_str): + """Extract job id from input. + + Specifically, this function checks the input for a job id and + returns just the job id if present. Otherwise, the full input is + returned. + + Parameters + ---------- + input_str : str + Stdout from :func:`submit` + + Returns + ------- + str + If job id is present in `input_str` then this is just the job + id. Otherwise, the full `input_str` is returned. + """ + return re.sub("[^0-9]", "", str(input_str)) or input_str +
+
\ No newline at end of file
diff --git a/_modules/gaps/legacy.html b/_modules/gaps/legacy.html
new file mode 100644
index 00000000..4d2b7610
--- /dev/null
+++ b/_modules/gaps/legacy.html
@@ -0,0 +1,884 @@

Source code for gaps.legacy

+# -*- coding: utf-8 -*-
+# pylint: disable=arguments-differ,arguments-renamed,unused-argument
+"""Legacy reV-like status manager. """
+import os
+import logging
+from abc import abstractmethod
+
+import gaps.batch
+import gaps.status
+import gaps.pipeline
+import gaps.hpc
+
+
+logger = logging.getLogger(__name__)
+
+_HARDWARE_MAP = {
+    "local": "local",
+    "eagle": "eagle",
+    "peregrine": "peregrine",
+    "kestrel": "kestrel",
+    "slurm": "kestrel",
+    "pbs": "peregrine",
+}
+
+
+
+[docs] +class PipelineError(Exception): + """Error for pipeline execution failure."""
+ + + +# pylint: disable=too-few-public-methods +
+[docs] +class HardwareStatusRetriever(gaps.status.HardwareStatusRetriever): + """Query hardware for job status.""" + + def __init__(self, hardware="slurm", subprocess_manager=None): + """Initialize `HardwareStatusRetriever`. + + Parameters + ---------- + hardware : str + Name of hardware that a pipeline is being run on: eagle, + slurm, local. Defaults to "slurm". + subprocess_manager : PBS | SLURM | None, optional + Optional initialized subprocess manager to use to check job + statuses. This can be input with cached queue data to avoid + constantly querying the HPC. By default, `None`. + """ + super().__init__(subprocess_manager) + self.hardware = gaps.status._validate_hardware( + _HARDWARE_MAP.get(hardware.lower(), hardware) + ) + + def __getitem__(self, key): + """Get the job status using pre-defined hardware-specific methods.""" + job_id, __ = key + if not job_id: + return None + + if self.subprocess_manager is not None: + return self.subprocess_manager.check_status_using_job_id(job_id) + + return self.hardware.check_status_using_job_id(job_id)
+ + + +
+[docs] +class Status(gaps.status.Status): + """Base class for data pipeline health and status information.""" + +
+[docs] + @classmethod + def retrieve_job_status( + cls, + status_dir, + module, + job_name, + hardware="slurm", + subprocess_manager=None, + ): + """Update and retrieve job status. + + Parameters + ---------- + status_dir : str + Directory containing json status file. + module : str + Module that the job belongs to. + job_name : str + Unique job name identification. + hardware : str + Name of hardware that this pipeline is being run on: eagle, + slurm, local. Defaults to "slurm". This specifies how job + are queried for status. + subprocess_manager : None | SLURM + Optional initialized subprocess manager to use to check job + statuses. This can be input with cached queue data to avoid + constantly querying the HPC. + + Returns + ------- + status : str | None + Status string or None if job/module not found. + """ + hsr = HardwareStatusRetriever( + _HARDWARE_MAP.get(hardware.lower(), hardware), subprocess_manager + ) + status = cls(status_dir)._retrieve_job_status(module, job_name, hsr) + if status == gaps.status.StatusOption.NOT_SUBMITTED: + status = None + return status
+ + +
+[docs] + @classmethod + def add_job( + cls, status_dir, module, job_name, replace=False, job_attrs=None + ): + """Add a job to status json. + + Parameters + ---------- + status_dir : str + Directory containing json status file. + module : str + Module that the job belongs to. + job_name : str + Unique job name identification. + replace : bool + Flag to force replacement of pre-existing job status. + job_attrs : dict + Job attributes. Should include 'job_id' if running on HPC. + """ + cls.mark_job_as_submitted( + status_dir=status_dir, + command=module, + job_name=job_name, + replace=replace, + job_attrs=job_attrs, + )
+ + +
    @staticmethod
    def make_job_file(status_dir, module, job_name, attrs):
        """Make a json file recording the status of a single job.

        Parameters
        ----------
        status_dir : str
            Directory to put json status file.
        module : str
            Module that the job belongs to.
        job_name : str
            Unique job name identification.
        attrs : dict
            Dictionary of job attributes that represent the job status
            attributes.
        """
        gaps.status.Status.make_single_job_file(
            status_dir=status_dir,
            command=module,
            job_name=job_name,
            attrs=attrs,
        )
+ + + @staticmethod + def _update_job_status_from_hardware(job_data, hardware_status_retriever): + """Update job status from HPC hardware if needed.""" + + # init defaults in case job/command not in status file yet + job_status = job_data.get(gaps.status.StatusField.JOB_STATUS, None) + job_id = job_data.get(gaps.status.StatusField.JOB_ID, None) + job_hardware = job_data.get(gaps.status.StatusField.HARDWARE, None) + + # get job status from hardware + current = hardware_status_retriever[job_id, job_hardware] + + # No current status and job was not successful: failed! + if ( + current is None + and job_status != gaps.status.StatusOption.SUCCESSFUL + ): + job_data[ + gaps.status.StatusField.JOB_STATUS + ] = gaps.status.StatusOption.FAILED + + # do not overwrite a successful or failed job status. + elif ( + current != job_status + and job_status not in gaps.status.StatusOption.members_as_str() + ): + job_data[gaps.status.StatusField.JOB_STATUS] = current
+ + + +# pylint: disable=no-member,invalid-name,super-init-not-called +
+[docs] +class Pipeline(gaps.pipeline.Pipeline): + """Legacy reV-like pipeline execution framework.""" + + @abstractmethod + def __init__(self, pipeline, monitor=True, verbose=False): + """ + When subclassing this object, you MUST set the following + properties in the __init__ method: + + monitor : bool + Flag to perform continuous monitoring of the pipeline. + verbose : bool + Flag to submit pipeline steps with "-v" flag for debug + logging. + _config : object + reV-style config object. The config object MUST have the + following attributes: + + name : str + Job name (typically defaults to the output directory + name). + dirout : str + Output file directory (typically the same directory + that contains the config file). + hardware : str + Name of hardware that the pipeline is being run on + (typically "eagle"). + _run_list : list + List of dictionaries, each with a single key-value pair, + where the key represents the command and the value + represents the command config filepath to substitute into + the :attr:`CMD_BASE` string. + + You must also call :meth:`_init_status` in the initializer. + If you want logging outputs during the submit step, make sure + to init the "gaps" logger. + + Parameters + ---------- + pipeline : str | dict + Pipeline config file path or dictionary. + monitor : bool, optional + Flag to perform continuous monitoring of the pipeline. + By default, `True`. + verbose : bool, optional + Flag to submit pipeline steps with "-v" flag for debug + logging. By default, `False`. + """ + raise NotImplementedError + + @property + @abstractmethod + def CMD_BASE(self): + """str: Formattable string of the base pipeline CLI command.""" + raise NotImplementedError + + @property + @abstractmethod + def COMMANDS(self): + """list: List of pipeline command names (as str).""" + raise NotImplementedError + + @property + def _out_dir(self): + """path-like: Output directory.""" + return self._config.dirout # cspell:disable-line + + @property + def _name(self): + """str: Name of the pipeline job (directory of status file).""" + return self._config.name + + @property + def _hardware(self): + """{'local', 'slurm', 'eagle'}: Hardware option.""" + return self._config.hardware + + def _submit(self, step): + """Submit a step in the pipeline.""" + + command, f_config = self._get_command_config(step) + cmd = self._get_cmd(command, f_config) + + logger.info( + "Pipeline submitting: %r for job %r", command, self._config.name + ) + logger.debug("Pipeline submitting subprocess call:\n\t%r", cmd) + + try: + stderr = gaps.hpc.submit(cmd)[1] + except OSError as e: + logger.exception( + "Pipeline subprocess submission returned an error: \n%s", e + ) + raise e + + if stderr: + logger.warning("Subprocess received stderr: \n%s", stderr) + + def _get_cmd(self, command, f_config): + """Get the python cli call string.""" + + if command not in self.COMMANDS: + raise KeyError( + f"Could not recognize command {command!r}. " + f"Available commands are: {self.COMMANDS!r}" + ) from None + cmd = self.CMD_BASE.format(fp_config=f_config, command=command) + if self.verbose: + cmd += " -v" + + return cmd + +
+[docs] + @classmethod + def run(cls, pipeline, monitor=True, verbose=False): + """Run the reV-style pipeline. + + Parameters + ---------- + pipeline : str | dict + Pipeline config file path or dictionary. + monitor : bool, optional + Flag to perform continuous monitoring of the pipeline. + By default, `True`. + verbose : bool, optional + Flag to submit pipeline steps with "-v" flag for debug + logging. By default, `False`. + """ + + pipe = cls(pipeline, monitor=monitor, verbose=verbose) + pipe._main()
+
+ + + +
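The class docstring above lists everything a subclass must provide. The following is a minimal sketch of such a subclass; the "my_tool" CLI, the command names, the config attributes, and all file paths are hypothetical stand-ins, and a real implementation would typically build its config object from the pipeline config file rather than hard-coding values.

# Hedged sketch of a legacy reV-style Pipeline subclass (all names are
# hypothetical and for illustration only).
from pathlib import Path
from types import SimpleNamespace

from gaps.legacy import Pipeline


class MyPipeline(Pipeline):
    """Example legacy pipeline for a hypothetical "my_tool" CLI."""

    # formattable base CLI call used by _get_cmd()
    CMD_BASE = "python -m my_tool.cli -c {fp_config} {command}"
    COMMANDS = ["generation", "collect"]

    def __init__(self, pipeline, monitor=True, verbose=False):
        self.monitor = monitor
        self.verbose = verbose
        out_dir = Path(pipeline).expanduser().parent.resolve()
        # reV-style config object with the required name/dirout/hardware
        self._config = SimpleNamespace(
            name=out_dir.name, dirout=str(out_dir), hardware="eagle"
        )
        # one {command: config_filepath} pair per pipeline step
        self._run_list = [
            {"generation": str(out_dir / "config_gen.json")},
            {"collect": str(out_dir / "config_collect.json")},
        ]
        self._init_status()


# Submitting the pipeline would issue real CLI subprocess calls, so the call
# is left commented out here:
# MyPipeline.run("./my_project/config_pipeline.json", monitor=True)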
class BatchJob(gaps.batch.BatchJob):
    """Legacy reV-like batch job framework.

    To use this class, simply override the following two attributes:

    PIPELINE_CLASS : `Pipeline`
        Pipeline class with at least two class methods: `run` and
        `cancel_all`. The `run` method must take `pipeline_config`,
        `monitor`, and `verbose` as arguments.
    PIPELINE_BACKGROUND_METHOD : callable
        Callable to run pipeline in the background with monitoring.
        If you set this as a static method (e.g.
        `PIPELINE_BACKGROUND_METHOD = staticmethod(my_callable)`),
        then this function should take exactly two arguments:
        `pipeline_config` and `verbose`. Otherwise, the function
        should take three arguments, where the first is a reference
        to this batch class, and the last two are the same as above.
    """

    @property
    @abstractmethod
    def PIPELINE_CLASS(self):
        """`Pipeline`: Pipeline class used to run and cancel batch jobs."""
        raise NotImplementedError

    @property
    @abstractmethod
    def PIPELINE_BACKGROUND_METHOD(self):
        """callable: Function used to run a pipeline in the background."""
        raise NotImplementedError

    def _run_pipelines(self, monitor_background=False, verbose=False):
        """Run the reV pipeline modules for each batch job.

        Parameters
        ----------
        monitor_background : bool
            Flag to monitor all batch pipelines continuously in the
            background using the nohup command. Note that the
            stdout/stderr will not be captured, but you can set a
            pipeline "log_file" to capture logs.
        verbose : bool
            Flag to turn on debug logging for the pipelines.
        """

        for sub_directory in self.sub_dirs:
            pipeline_config = sub_directory / self._pipeline_fp.name
            pipeline_config = pipeline_config.as_posix()
            if not os.path.isfile(pipeline_config):
                raise PipelineError(
                    f"Could not find pipeline config to run: "
                    f"{pipeline_config!r}"
                )
            if monitor_background:
                self.PIPELINE_BACKGROUND_METHOD(
                    pipeline_config, verbose=verbose
                )
            else:
                self.PIPELINE_CLASS.run(
                    pipeline_config, monitor=False, verbose=verbose
                )

    def _cancel_all(self):
        """Cancel all reV pipeline modules for all batch jobs."""
        for sub_directory in self.sub_dirs:
            pipeline_config = sub_directory / self._pipeline_fp.name
            pipeline_config = pipeline_config.as_posix()
            if os.path.isfile(pipeline_config):
                self.PIPELINE_CLASS.cancel_all(pipeline_config)
+[docs] + @classmethod + def cancel_all(cls, config, verbose=False): + """Cancel all reV pipeline modules for all batch jobs. + + Parameters + ---------- + config : str + File path to batch config json or csv (str). + verbose : bool + Flag to turn on debug logging. + """ + + cls(config)._cancel_all()
+ + +
+[docs] + @classmethod + def delete_all(cls, config, verbose=False): + """Delete all reV batch sub job folders based on the job summary csv + in the batch config directory. + + Parameters + ---------- + config : str + File path to batch config json or csv (str). + verbose : bool + Flag to turn on debug logging. + """ + cls(config).delete()
+ + +
+[docs] + @classmethod + def run( + cls, + config, + dry_run=False, + delete=False, + monitor_background=False, + verbose=False, + ): + """Run the reV batch job from a config file. + + Parameters + ---------- + config : str + File path to config json or csv (str). + dry_run : bool + Flag to make job directories without running. + delete : bool + Flag to delete all batch job sub directories based on the job + summary csv in the batch config directory. + monitor_background : bool + Flag to monitor all batch pipelines continuously + in the background using the nohup command. Note that the + stdout/stderr will not be captured, but you can set a + pipeline "log_file" to capture logs. + verbose : bool + Flag to turn on debug logging for the pipelines. + """ + + b = cls(config) + if delete: + b.delete() + else: + b._make_job_dirs() + if not dry_run: + b._run_pipelines( + monitor_background=monitor_background, verbose=verbose + )
+
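A matching batch-job subclass only has to point at a pipeline class and a background runner. The sketch below reuses the hypothetical MyPipeline class from the earlier pipeline sketch; the batch config path is illustrative.

# Hedged sketch of a legacy BatchJob subclass.
from gaps.legacy import BatchJob


def _background_runner(pipeline_config, verbose=False):
    """Run one batch pipeline with continuous monitoring."""
    MyPipeline.run(pipeline_config, monitor=True, verbose=verbose)


class MyBatchJob(BatchJob):
    """Example legacy batch job for the hypothetical "my_tool" CLI."""

    PIPELINE_CLASS = MyPipeline
    PIPELINE_BACKGROUND_METHOD = staticmethod(_background_runner)


# Creating the sub-directories and submitting each pipeline would issue real
# CLI calls, so the call is left commented out here:
# MyBatchJob.run("./my_project/config_batch.json")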
\ No newline at end of file
diff --git a/_modules/gaps/log.html b/_modules/gaps/log.html
new file mode 100644
index 00000000..116e07d2
--- /dev/null
+++ b/_modules/gaps/log.html
@@ -0,0 +1,640 @@
+gaps.log — gaps 0.4.2 documentation

Source code for gaps.log

+# -*- coding: utf-8 -*-
+"""Logging utilities.
+
+Some of the code in this module is borrowed from rex
+(https://github.com/NREL/rex).
+"""
+import sys
+import logging
+from functools import partial
+from pathlib import Path
+
+import psutil
+
+import rex
+import gaps
+
+FORMAT = "gaps (%(levelname)s) - [%(filename)s:%(lineno)d] : %(message)s"
+"""Default format for gaps logging."""
+
+
+
+[docs] +def log_versions(): + """Log package versions.""" + gaps.logger.info("Running with gaps version %s", gaps.__version__) + rex.utilities.log_versions(gaps.logger)
+ + + +
def log_mem(log_level="DEBUG"):
    """Log the memory usage to the gaps logger.

    Parameters
    ----------
    log_level : str
        DEBUG or INFO for different log levels for this log message.

    Returns
    -------
    msg : str
        Memory utilization log message string.
    """
    mem = psutil.virtual_memory()
    msg = (
        f"Memory utilization is {mem.used / (1024.0 ** 3):.3f} GB "
        f"out of {mem.total / (1024.0 ** 3):.3f} GB total "
        f"({mem.used / mem.total:.1%} used)"
    )
    gaps.logger.log(logging.getLevelName(log_level), msg)

    return msg
+ + + +
+[docs] +def init_logger( + stream=True, + level="INFO", + file=None, + fmt=FORMAT, +): + """Initialize and setup logging instance. + + Parameters + ---------- + stream : bool, optional + Option to add a StreamHandler along with FileHandler. + By default `True`. + level : str, optional + Level of logging to capture. If multiple handlers/log_files are + requested in a single call of this function, the specified + logging level will be applied to all requested handlers. + By default, "INFO". + file : str | list, optional + Path to file that should be used for logging. This can also be + a list of filepaths to use for multiple log files. + By default, `None`. + fmt : str, optional + Format for loggings. By default `FORMAT`. + """ + + file = file or [] + if isinstance(file, str): + file = [file] + + handlers = [ + make_handler(partial(make_log_file_handler, f), level=level, fmt=fmt) + for f in file + ] + + if stream: + handlers.append( + make_handler(make_log_stream_handler, level=level, fmt=fmt) + ) + + add_handlers(handlers)
+ + + +
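A short usage sketch for the helpers above: route gaps log records to stdout and to a log file. The log-file path is illustrative.

from gaps.log import init_logger, log_mem, log_versions

init_logger(stream=True, level="DEBUG", file="./logs/gaps.log")
log_versions()   # record the gaps/rex versions via the "gaps" logger
log_mem("INFO")  # record current memory utilization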
+[docs] +def make_handler(factory, level="INFO", fmt=FORMAT): + """Make a handler to add to a Logger instance. + + Parameters + ---------- + factory : callable + A callable function to create the default handler. + level : str, optional + A string representing the logging level for the handler. + By default "INFO". + fmt : str, optional + A string representing the formatting for logging calls. + By default `FORMAT`. + + Returns + ------- + handler : `logging.Handler` + Handler with the specified log level and format. + """ + + handler = factory() + + handler.setLevel(level.upper()) + + log_format = logging.Formatter(fmt) + handler.setFormatter(log_format) + + return handler
+ + + +
def make_log_file_handler(file_path):
    """Make a file handler to add to a Logger instance.

    If the directory structure for `file_path` does not exist, it is
    created before initializing the FileHandler.

    Parameters
    ----------
    file_path : str
        Path to the output log file.

    Returns
    -------
    handler : `logging.FileHandler`
        File handler with `file_path` as the name.
    """

    log_file = Path(file_path).expanduser().resolve()
    log_file.parent.mkdir(parents=True, exist_ok=True)

    handler = logging.FileHandler(log_file, mode="a")
    handler.set_name(log_file.name)

    return handler
+ + + +
def make_log_stream_handler():
    """Make a stream handler to add to a Logger instance.

    Returns
    -------
    handler : `logging.StreamHandler`
        Stream handler with name "stream".
    """

    handler = logging.StreamHandler(sys.stdout)
    handler.set_name("stream")

    return handler
+ + + +
+[docs] +def add_handlers(handlers): + """Add handlers to logger ensuring they do not already exist. + + Parameters + ---------- + handlers : list + Handlers to add to logger instance. + """ + current_handlers = {h.name: h for h in gaps.logger.handlers} + for handler in handlers: + name = handler.name + if name not in current_handlers: + gaps.logger.addHandler(handler) + current_handlers.update({name: handler}) + else: + existing_handler = current_handlers[name] + if handler.level < existing_handler.level: + existing_handler.setLevel(handler.level)
+ + + + + + + + + + + +def __cls_name(obj): + """Format the class name of the input logger/handler object.""" + return str(obj.__class__)[8:-2] +
\ No newline at end of file
diff --git a/_modules/gaps/pipeline.html b/_modules/gaps/pipeline.html
new file mode 100644
index 00000000..af3529a7
--- /dev/null
+++ b/_modules/gaps/pipeline.html
@@ -0,0 +1,785 @@
+gaps.pipeline — gaps 0.4.2 documentation

Source code for gaps.pipeline

+# -*- coding: utf-8 -*-
+"""
+GAPs Pipeline architecture.
+"""
+import time
+import logging
+from pathlib import Path
+from warnings import warn
+
+from gaps.status import Status, StatusOption, StatusField, HardwareOption
+from gaps.utilities import recursively_update_dict
+from gaps.config import load_config
+from gaps.exceptions import (
+    gapsConfigError,
+    gapsExecutionError,
+    gapsKeyError,
+    gapsValueError,
+)
+from gaps.warnings import gapsWarning
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class Pipeline: + """gaps pipeline execution framework.""" + + COMMANDS = {} + + def __init__(self, pipeline, monitor=True): + """ + + Parameters + ---------- + pipeline : path-like + Pipeline config file path. + monitor : bool, optional + Flag to perform continuous monitoring of the pipeline. + By default, ``True``. + + Raises + ------ + gapsConfigError + If "pipeline" key not in config file. + """ + self.monitor = monitor + self._out_dir = Path(pipeline).expanduser().parent.resolve() + self._out_dir = self._out_dir.as_posix() + + config = load_config(pipeline) + self._run_list = _check_pipeline(config) + self._init_status() + + def _init_status(self): + """Initialize the status json in the output directory.""" + status = self.status + for pipe_index, step in enumerate(self._run_list): + for command in step.keys(): + command_dict = { + command: {StatusField.PIPELINE_INDEX: pipe_index} + } + status.data = recursively_update_dict( + status.data, command_dict + ) + + _dump_sorted(status) + + @property + def status(self): + """:class:`~gaps.status.Status`: A gaps pipeline status object.""" + return Status(self._out_dir).update_from_all_job_files(purge=False) + + @property + def name(self): + """str: Name of the pipeline job (directory of status file).""" + return self.status.name + + def _cancel_all_jobs(self): + """Cancel all jobs in this pipeline.""" + status = self.status + for job_id, hardware in zip(status.job_ids, status.job_hardware): + manager = HardwareOption(hardware).manager + if manager is not None: + manager.cancel(job_id) + logger.info("Pipeline job %r cancelled.", self.name) + + def _main(self): + """Iterate through run list submitting steps while monitoring status""" + + for step, command in enumerate(self._run_list): + step_status = self._status(step) + + if step_status == StatusOption.SUCCESSFUL: + logger.debug("Successful: %r.", list(command.keys())[0]) + continue + + # the submit function handles individual job success/failure + self._submit(step) + + # do not enter while loop for continuous monitoring + if not self.monitor: + break + + time.sleep(1) + try: + self._monitor(step) + except Exception as error: + self._cancel_all_jobs() + raise error + + else: + logger.info("Pipeline job %r is complete.", self.name) + logger.debug("Output directory is: %r", self._out_dir) + + def _monitor(self, step, seconds=5, step_status=StatusOption.RUNNING): + """Continuously monitor job until success or failure.""" + + while step_status.is_processing: + time.sleep(seconds) + HardwareOption.reset_all_cached_queries() + step_status = self._status(step) + + if step_status == StatusOption.FAILED: + command, f_config = self._get_command_config(step) + raise gapsExecutionError( + f"Pipeline failed at step {step}: {command!r} " + f"for {f_config!r}" + ) + + def _submit(self, step): + """Submit a step in the pipeline.""" + + command, f_config = self._get_command_config(step) + if command not in self.COMMANDS: + raise gapsKeyError( + f"Could not recognize command {command!r}. 
" + f"Available commands are: {set(self.COMMANDS)!r}" + ) from None + + self.COMMANDS[command].callback(f_config) + + def _status(self, step): + """Get a pipeline step status.""" + + command, _ = self._get_command_config(step) + status = self.status + submitted = _check_jobs_submitted(status, command) + if not submitted: + return StatusOption.NOT_SUBMITTED + + return self._get_command_return_code(status, command) + + def _get_command_return_code(self, status, command): + """Get a return code for a command based on a status object. + + Note that it is assumed a job has been submitted before this + function is called, otherwise the return values make no sense! + """ + + # initialize return code array + arr = [] + check_failed = False + status.update_from_all_job_files(check_hardware=True) + + if command not in status.data: + # assume running + arr = [StatusOption.RUNNING] + else: + for job_name, job_info in status.data[command].items(): + if job_name == StatusField.PIPELINE_INDEX: + continue + + job_status = job_info.get(StatusField.JOB_STATUS) + + if job_status == "successful": + arr.append(StatusOption.SUCCESSFUL) + elif job_status == "failed": + arr.append(StatusOption.FAILED) + check_failed = True + elif job_status == "submitted": + arr.append(StatusOption.SUBMITTED) + elif job_status == "not submitted": + arr.append(StatusOption.NOT_SUBMITTED) + elif job_status == "running": + arr.append(StatusOption.RUNNING) + elif job_status is None: + arr.append(StatusOption.COMPLETE) + else: + msg = f"Job status code {job_status!r} not understood!" + raise gapsValueError(msg) + + _dump_sorted(status) + + return_code = _parse_code_array(arr) + + fail_str = "" + if return_code != StatusOption.FAILED and check_failed: + fail_str = ", but some jobs have failed" + logger.info( + "CLI command %r for job %r %s%s. (%s)", + command, + self.name, + return_code.with_verb, # pylint: disable=no-member + fail_str, + time.ctime(), + ) + + return return_code + + def _get_command_config(self, step): + """Get the (command, config) key pair.""" + return list(self._run_list[step].items())[0] + +
+[docs] + @classmethod + def cancel_all(cls, pipeline): + """Cancel all jobs corresponding to pipeline. + + Parameters + ---------- + pipeline : path-like + Pipeline config file path. + """ + cls(pipeline)._cancel_all_jobs()
+ + +
+[docs] + @classmethod + def run(cls, pipeline, monitor=True): + """Run the pipeline. + + Parameters + ---------- + pipeline : path-like + Pipeline config file path. + monitor : bool + Flag to perform continuous monitoring of the pipeline. + """ + cls(pipeline, monitor=monitor)._main()
+
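The config handed to Pipeline must contain a "pipeline" key mapping to a list of single-entry {command: config_path} dictionaries, one per step, executed in order (this is what _check_pipeline below enforces). A minimal sketch with illustrative step names and paths:

import json

from gaps.pipeline import Pipeline

pipeline_config = {
    "pipeline": [
        {"run": "./config_run.json"},
        {"collect": "./config_collect.json"},
    ]
}
with open("./config_pipeline.json", "w") as config_file:
    json.dump(pipeline_config, config_file, indent=4)

# Each step's config file must exist and each command must be registered in
# Pipeline.COMMANDS (normally done by the package's CLI layer) before running:
# Pipeline.run("./config_pipeline.json", monitor=True)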
+ + + +def _check_pipeline(config): + """Check pipeline steps input.""" + + if "pipeline" not in config: + raise gapsConfigError( + "Could not find required key " + '"pipeline" ' + "in the pipeline config." + ) + + pipeline = config["pipeline"] + + if not isinstance(pipeline, list): + raise gapsConfigError( + 'Config arg "pipeline" must be a list of ' + f"{{command: f_config}} pairs, but received {type(pipeline)}." + ) + + for step_dict in pipeline: + for f_config in step_dict.values(): + if not Path(f_config).expanduser().resolve().exists(): + raise gapsConfigError( + "Pipeline step depends on non-existent " + f"file: {f_config}" + ) + + return pipeline + + +def _parse_code_array(arr): + """Parse array of return codes to get single return code for command.""" + + # check to see if all have completed, or any have failed + all_successful = all(status == StatusOption.SUCCESSFUL for status in arr) + all_completed = not any(status.is_processing for status in arr) + any_failed = any(status == StatusOption.FAILED for status in arr) + any_submitted = any(status == StatusOption.SUBMITTED for status in arr) + + # only return success if all have succeeded. + if all_successful: + return StatusOption.SUCCESSFUL + # Only return failed when all have finished. + if all_completed and any_failed: + return StatusOption.FAILED + # only return complete when all have completed + # (but some should have succeeded or failed) + if all_completed: + return StatusOption.COMPLETE + # only return "running" if all jobs running, else return "submitted" + if any_submitted: + return StatusOption.SUBMITTED + # otherwise, all jobs are still running + return StatusOption.RUNNING + + +def _check_jobs_submitted(status, command): + """Check whether jobs have been submitted for a given command.""" + return any( + job != StatusField.PIPELINE_INDEX for job in status.data.get(command) + ) + + +def _dump_sorted(status): + """Dump status dict after sorting on PIPELINE_INDEX.""" + pi_key = StatusField.PIPELINE_INDEX + + def _sort_key(status_entry): + """Sort on pipeline index and key name, putting non-pipeline at top.""" + try: + pipe_index = status[status_entry].get(pi_key, -1) + except AttributeError: + pipe_index = -1 + return pipe_index, status_entry + + sorted_keys = sorted(status, key=_sort_key) + status.data = {k: status[k] for k in sorted_keys} + status.dump() + + +
+[docs] +def parse_previous_status(status_dir, command, key=StatusField.OUT_FILE): + """Parse key from job status(es) from the previous pipeline step. + + This command DOES NOT check the HPC queue for jobs and therefore + DOES NOT update the status of previously running jobs that have + errored out of the HPC queue. For best results, ensure that all + previous steps of a pipeline have finished processing before calling + this function. + + Parameters + ---------- + status_dir : path-like + Directory containing the status file to parse. + command : str + Current CLI command (i.e. current pipeline step). + key : `StatusField` | str, optional + Parsing target of previous command. By default, + `StatusField.OUT_FILE`. + + Returns + ------- + out : list + Arguments parsed from the status file in status_dir from + the command preceding the input command arg. This list is + empty if the `key` is not found in the job status, or if + the previous step index does not exist in status. + + Raises + ------ + gapsKeyError + If ``command`` not in status. + """ + + status = Status(status_dir).update_from_all_job_files(purge=False) + + if ( + command not in status + or StatusField.PIPELINE_INDEX not in status[command] + ): + msg = ( + f"Could not parse data for command {command!r} from status " + f"file in {status_dir!r}" + ) + raise gapsKeyError(msg) + + index = int(status.data[command][StatusField.PIPELINE_INDEX]) - 1 + + if index < 0: + index = 0 + msg = ( + f"CLI command {command!r} is attempting to parse a previous " + f"pipeline step, but it appears to be the first step. " + f"Attempting to parse data from {command!r}." + ) + warn(msg, gapsWarning) + + for cmd, status in status.items(): + try: + command_index = status.get(StatusField.PIPELINE_INDEX) + except AttributeError: + continue + + if str(index) == str(command_index): + return Status.parse_command_status(status_dir, cmd, key) + + return []
+ +
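For example, a hypothetical "collect" step could gather the output files written by whichever step ran before it. This sketch assumes a pipeline status already exists in the status directory; the directory and step names are illustrative.

from gaps.pipeline import parse_previous_status
from gaps.status import StatusField

out_files = parse_previous_status(
    "./my_project", "collect", key=StatusField.OUT_FILE
)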
\ No newline at end of file
diff --git a/_modules/gaps/project_points.html b/_modules/gaps/project_points.html
new file mode 100644
index 00000000..b2f6348e
--- /dev/null
+++ b/_modules/gaps/project_points.html
@@ -0,0 +1,718 @@
+gaps.project_points — gaps 0.4.2 documentation

Source code for gaps.project_points

+# -*- coding: utf-8 -*-
+"""
+GAPs Project Points
+"""
+import logging
+from warnings import warn
+
+import numpy as np
+import pandas as pd
+
+from rex.utilities import parse_table
+from gaps.exceptions import (
+    gapsIndexError,
+    gapsKeyError,
+    gapsRuntimeError,
+)
+from gaps.utilities import project_points_from_container_or_slice
+
+
+logger = logging.getLogger(__name__)
+
+
+def _parse_sites(points):
+    """Parse project points from list or slice
+
+    Parameters
+    ----------
+    points : int | str | pd.DataFrame | slice | list
+        Slice specifying project points, string pointing to a project
+        points csv, or a DataFrame containing the effective csv contents.
+        Can also be a single integer site value.
+
+    Returns
+    -------
+    df : pd.DataFrame
+        DataFrame of sites (gids) with corresponding args
+
+    Raises
+    ------
+    gapsRuntimeError
+        If the points input is not flat.
+    """
+    try:
+        points = project_points_from_container_or_slice(points)
+    except TypeError as err:
+        msg = (
+            f"Cannot parse points data from {points}. If this input is a "
+            "container, please ensure that the container is flat (no "
+            "nested gid values)."
+        )
+        raise gapsRuntimeError(msg) from err
+
+    return pd.DataFrame({"gid": points})
+
+
+
+[docs] +class ProjectPoints: + """Class to manage site and SAM input configuration requests.""" + + def __init__(self, points, **kwargs): + """Initialize ProjectPoints. + + Parameters + ---------- + points : int | slice | list | tuple | str | pd.DataFrame | dict + Slice specifying project points, string pointing to a + project points csv, or a DataFrame containing the effective + csv contents. Can also be a single integer site value. + **kwargs + Keyword-argument pairs to add to project points DataFrame. + The key should be the column name, and the value should + be the value to add under the column name. Values must + either be a scalar or match the length of the DataFrame + resulting from the `points` input. + """ + self._df = self.split_range = None + self._parse_points(points, **kwargs) + + def _parse_points(self, points, **kwargs): + """Generate the project points df from inputs + + Parameters + ---------- + points : int | str | pd.DataFrame | slice | list | dict + Slice specifying project points, string pointing to a + project points csv, or a DataFrame containing the effective + csv contents. Can also be a single integer site value. + + Returns + ------- + df : pd.DataFrame + DataFrame of sites (gids) with corresponding args + """ + try: + self._df = parse_table(points) + except ValueError: + self._df = _parse_sites(points) + + if "gid" not in self._df.columns: + raise gapsKeyError( + 'Project points data must contain "gid" column.' + ) + + for key, val in kwargs.items(): + self._df[key] = val + + self.split_range = (self._df.iloc[0].name, self._df.iloc[-1].name + 1) + + gids = self._df["gid"].values + if not np.array_equal(np.sort(gids), gids): + msg = ( + "Points are not in sequential order and will be sorted! The " + 'original order is being preserved under column "points_order"' + ) + logger.warning(msg) + warn(msg) + self._df["points_order"] = self._df.index.values + self._df = self._df.sort_values("gid") + + self._df = self._df.reset_index(drop=True) + + def __iter__(self): + for __, row in self.df.iterrows(): + yield row + + def __getitem__(self, site_id): + """Get the dictionary for the requested site. + + Parameters + ---------- + site_id : int | str + Site number (gid) of interest (typically the resource gid). + + Returns + ------- + site : pd.Series + Pandas Series containing information for the site with the + requested site_id. + """ + + if site_id not in self.df["gid"].values: + msg = ( + f"Site {site_id} not found in this instance of " + f"ProjectPoints. 
Available sites include: {self.gids}" + ) + raise gapsKeyError(msg) + + return self.df.loc[self.df["gid"] == site_id].iloc[0] + + def __repr__(self): + return ( + f"{self.__class__.__name__} with {len(self)} sites from gid " + f"{self.gids[0]} through {self.gids[-1]}" + ) + + def __len__(self): + """Length of this object is the number of sites.""" + return len(self.gids) + + @property + def df(self): + """pd.DataFrame: Project points DataFrame of site info.""" + return self._df + + @property + def gids(self): + """list: Gids (resource file index values) of sites.""" + return self.df["gid"].values.tolist() + + @property + def sites_as_slice(self): + """list | slice: Sites in slice format or list if non-sequential.""" + # try_slice is what the sites list would be if it is sequential + if len(self.gids) > 1: + try_step = self.gids[1] - self.gids[0] + else: + try_step = 1 + + try_slice = slice(self.gids[0], self.gids[-1] + 1, try_step) + try_list = list(range(*try_slice.indices(try_slice.stop))) + + if self.gids == try_list: + return try_slice + + # cannot be converted to a sequential slice, return list + return self.gids + +
+[docs] + def index(self, gid): + """Index location (iloc not loc) for a resource gid. + + Parameters + ---------- + gid : int + Resource GID found in the project points gid column. + + Returns + ------- + ind : int + Row index of gid in the project points DataFrame. + """ + if gid not in self._df["gid"].values: + msg = ( + f"Requested resource gid {gid} is not present in the project " + f"points DataFrame. Cannot return row index." + ) + raise gapsIndexError(msg) + + ind = np.where(self._df["gid"] == gid)[0][0] + + return ind
+ + +
+[docs] + def join_df(self, df2, key="gid"): + """Join df2 to the _df attribute using _df's gid as the join key. + + This can be used to add site-specific data to the project_points, + taking advantage of the `ProjectPoints` iterator/split functions + such that only the relevant site data is passed to the analysis + functions. + + Parameters + ---------- + df2 : pd.DataFrame + DataFrame to be joined to the :attr:`df` attribute (this + instance of project points DataFrame). This likely contains + site-specific inputs that are to be passed to parallel + workers. + key : str + Primary key of df2 to be joined to the :attr:`df` attribute + (this instance of the project points DataFrame). Primary key + of the self._df attribute is fixed as the gid column. + """ + # ensure df2 doesn't have any duplicate columns for suffix reasons. + df2_cols = [c for c in df2.columns if c not in self._df or c == key] + self._df = pd.merge( + self._df, + df2[df2_cols], + how="left", + left_on="gid", + right_on=key, + copy=False, + validate="1:1", + )
+ + +
    def get_sites_from_key(self, key, value):
        """Get a site list for which the key equals the value.

        Parameters
        ----------
        key : str
            Name of key (column) in project points DataFrame.
        value : int | float | str | obj
            Value to look for under the ``key`` column.

        Returns
        -------
        sites : list of ints
            List of sites (GID values) associated with the requested key
            and value. If the key or value does not exist, an empty list
            is returned.
        """
        if key not in self.df:
            return []

        return list(self.df.loc[self.df[key] == value, "gid"].values)
+ + +
+[docs] + def split(self, sites_per_split=100): + """Split the project points into sub-groups by number of sites. + + Parameters + ---------- + sites_per_split : int, optional + Number of points in each sub-group. By default, `100`. + + Yields + ------ + ProjectPoints + A new ProjectPoints instance with up to `sites_per_split` + number of points. + """ + for ind in range(0, len(self.df), sites_per_split): + yield self.__class__(self.df.iloc[ind : ind + sites_per_split])
+ + +
    @classmethod
    def from_range(cls, split_range, points, **kwargs):
        """Create a ProjectPoints instance from a range of indices.

        Parameters
        ----------
        split_range : 2-tuple
            Tuple containing the start and end index (iloc, not loc).
            Last index is not included.
        points : int | slice | list | tuple | str | pd.DataFrame | dict
            Slice specifying project points, string pointing to a
            project points csv, or a DataFrame containing the effective
            csv contents. Can also be a single integer site value.
        **kwargs
            Keyword-argument pairs to add to project points DataFrame.
            The key should be the column name, and the value should
            be the value to add under the column name. Values must
            either be a scalar or match the length of the DataFrame
            resulting from the `points` input.

        Returns
        -------
        ProjectPoints
            A new ProjectPoints instance with a range sampled from the
            points input according to ``split_range``.
        """
        pp = cls(points, **kwargs)
        start, end = split_range
        return cls(pp.df.iloc[start:end])
+
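A short, self-contained usage sketch for the class above; the gid values and the extra "config" column are illustrative.

from gaps.project_points import ProjectPoints

points = ProjectPoints([0, 1, 2, 5, 6], config="default")
print(points.gids)            # [0, 1, 2, 5, 6]
print(points.sites_as_slice)  # the gid list, since the gids are not evenly spaced

# break the sites into chunks, e.g. for parallel execution on multiple nodes
for chunk in points.split(sites_per_split=2):
    print(chunk.split_range, chunk.gids)

# re-create the second chunk directly from the original points input
chunk = ProjectPoints.from_range((2, 4), [0, 1, 2, 5, 6], config="default")
print(chunk.gids)             # [2, 5]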
\ No newline at end of file
diff --git a/_modules/gaps/status.html b/_modules/gaps/status.html
new file mode 100644
index 00000000..b7a4caaa
--- /dev/null
+++ b/_modules/gaps/status.html
@@ -0,0 +1,1305 @@
+gaps.status — gaps 0.4.2 documentation

Source code for gaps.status

+# -*- coding: utf-8 -*-
+"""gaps Job status manager. """
+import json
+import time
+import shutil
+import pprint
+import logging
+import datetime as dt
+from pathlib import Path
+from copy import deepcopy
+from warnings import warn
+from itertools import chain
+from collections import UserDict, abc
+
+import pandas as pd
+import numpy as np
+
+from rex.utilities import safe_json_load
+from gaps.hpc import SLURM, PBS
+from gaps.config import ConfigType
+from gaps.utilities import recursively_update_dict, CaseInsensitiveEnum
+from gaps.exceptions import gapsKeyError, gapsTypeError
+from gaps.warnings import gapsWarning
+
+
+logger = logging.getLogger(__name__)
+JOB_STATUS_FILE = "jobstatus_{}.json"
+MONITOR_PID_FILE = "monitor_pid.json"
+NAMED_STATUS_FILE = "{}_status.json"
+DT_FMT = "%d-%b-%Y %H:%M:%S"
+
+
+
+[docs] +class StatusField(CaseInsensitiveEnum): + """A collection of required status fields in a status file.""" + + JOB_ID = "job_id" + JOB_STATUS = "job_status" + PIPELINE_INDEX = "pipeline_index" + HARDWARE = "hardware" + QOS = "qos" + OUT_FILE = "out_file" + TIME_SUBMITTED = "time_submitted" + TIME_START = "time_start" + TIME_END = "time_end" + TOTAL_RUNTIME = "total_runtime" + RUNTIME_SECONDS = "runtime_seconds" + MONITOR_PID = "monitor_pid"
+ + + +
+[docs] +class HardwareOption(CaseInsensitiveEnum): + """A collection of hardware options.""" + + LOCAL = "local" + KESTREL = "kestrel" + EAGLE = "eagle" + PEREGRINE = "peregrine" + + @classmethod + def _new_post_hook(cls, obj, value): + """Hook for post-processing after __new__""" + obj.is_hpc = value != "local" + if value in {"eagle", "kestrel"}: + obj.manager = SLURM() + obj.check_status_using_job_id = ( + obj.manager.check_status_using_job_id + ) + obj.supports_categorical_qos = True + obj.charge_factor = 3 + elif value in {"peregrine"}: + obj.manager = PBS() + obj.check_status_using_job_id = ( + obj.manager.check_status_using_job_id + ) + obj.supports_categorical_qos = False + obj.charge_factor = 1 + else: + obj.manager = None + obj.check_status_using_job_id = lambda *__, **___: None + obj.supports_categorical_qos = False + obj.charge_factor = 0 + return obj + + # pylint: disable=no-member +
+[docs] + @classmethod + def reset_all_cached_queries(cls): + """Reset all cached hardware queries.""" + cls.EAGLE.manager.reset_query_cache() + cls.KESTREL.manager.reset_query_cache() + cls.PEREGRINE.manager.reset_query_cache()
+
+ + + +
+[docs] +class StatusOption(CaseInsensitiveEnum): + """A collection of job status options.""" + + NOT_SUBMITTED = "not submitted" + SUBMITTED = "submitted" + RUNNING = "running" + SUCCESSFUL = "successful" + FAILED = "failed" + COMPLETE = "complete" + + @classmethod + def _new_post_hook(cls, obj, value): + """Hook for post-processing after __new__""" + verb = "has" if value == "failed" else "is" + obj.with_verb = " ".join([verb, value]) + obj.is_processing = value in {"submitted", "running"} + return obj
+ + + +
+[docs] +class QOSOption(CaseInsensitiveEnum): + """A collection of job QOS options.""" + + NORMAL = "normal" + HIGH = "high" + UNSPECIFIED = "unspecified" + + @classmethod + def _new_post_hook(cls, obj, value): + """Hook for post-processing after __new__""" + if value in {"high"}: + obj.charge_factor = 2 + else: + obj.charge_factor = 1 + return obj
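Because these are case-insensitive string enums, raw strings coming from config files or the CLI can be normalized directly, and the extra attributes attached in the _new_post_hook methods are then available on the members. A small illustration:

from gaps.status import HardwareOption, QOSOption, StatusOption

hardware = HardwareOption("Kestrel")               # case-insensitive lookup
print(hardware.is_hpc, hardware.charge_factor)     # True 3

print(QOSOption("HIGH").charge_factor)             # 2

failed = StatusOption("FAILED")
print(failed.with_verb, failed.is_processing)      # has failed False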
+ + + +# pylint: disable=too-few-public-methods +
+[docs] +class HardwareStatusRetriever: + """Query hardware for job status.""" + + def __init__(self, subprocess_manager=None): + """Initialize `HardwareStatusRetriever`. + + Parameters + ---------- + subprocess_manager : PBS | SLURM | None, optional + Optional initialized subprocess manager to use to check job + statuses. This can be input with cached queue data to avoid + constantly querying the HPC. By default, `None`. + """ + self.subprocess_manager = subprocess_manager + + def __getitem__(self, key): + """Get the job status using pre-defined hardware-specific methods.""" + job_id, hardware = key + if not job_id: + return None + + if self.subprocess_manager is not None: + return self.subprocess_manager.check_status_using_job_id(job_id) + + return _validate_hardware(hardware).check_status_using_job_id(job_id)
+ + + +
+[docs] +class Status(UserDict): + """Base class for data pipeline health and status information. + + This class facilitates the creating of individual job status files + which the main `Status` object can collect. + """ + + _DF_COLUMNS = [ + StatusField.JOB_STATUS.value, + StatusField.PIPELINE_INDEX.value, + StatusField.JOB_ID.value, + StatusField.TIME_SUBMITTED.value, + StatusField.TIME_START.value, + StatusField.TIME_END.value, + StatusField.TOTAL_RUNTIME.value, + StatusField.HARDWARE.value, + StatusField.QOS.value, + ] + + def __init__(self, status_dir): + """Initialize `Status`. + + Parameters + ---------- + status_dir : path-like + Directory containing zero or more job json status files. + """ + super().__init__() + self.dir = Path(status_dir).expanduser().resolve() + self._validate_dir() + self.name = self.dir.name + self._fpath = self.dir / NAMED_STATUS_FILE.format(self.name) + self.data = _load(self._fpath) + + def _validate_dir(self): + """Validate that the directory name is not a config file type.""" + for file_type in ConfigType.members_as_str(): + if self.dir.name.endswith(f".{file_type}"): + raise gapsTypeError( + f"Need a directory containing a status {file_type}, " + f"not a status {file_type}: {self.dir!r}" + ) + + @property + def job_ids(self): + """list: Flat list of job ids.""" + return _get_attr_flat_list(self.data, key=StatusField.JOB_ID) + + @property + def job_hardware(self): + """list: Flat list of job hardware options.""" + return _get_attr_flat_list(self.data, key=StatusField.HARDWARE) + +
+[docs] + def as_df(self, commands=None, index_name="job_name", include_cols=None): + """Format status as pandas DataFrame. + + Parameters + ---------- + commands : container, optional + A container of command names to collect. If `None`, all + commands in the status file are collected. + By default, `None`. + index_name : str, optional + Name to assign to index of DataFrame. + By default, `"job_name"`. + + Returns + ------- + pd.DataFrame + Pandas DataFrame containing status information. + """ + include_cols = include_cols or [] + output_cols = self._DF_COLUMNS + list(include_cols) + + self.update_from_all_job_files(purge=False, check_hardware=True) + if not self.data: + return pd.DataFrame(columns=output_cols) + + requested_commands = commands or self.keys() + commands = [] + for command, status in self.items(): + if command not in requested_commands: + continue + try: + command_index = status.pop(StatusField.PIPELINE_INDEX, None) + except (AttributeError, TypeError): + continue + if not status: + status = {command: {}} + try: + command_df = pd.DataFrame(status).T + except ValueError: + continue + command_df[f"{StatusField.PIPELINE_INDEX}"] = command_index + commands.append(command_df) + + try: + command_df = pd.concat(commands, sort=False) + except ValueError: + return pd.DataFrame(columns=output_cols) + + for field in chain(StatusField, include_cols): + if field not in command_df.columns: + command_df[f"{field}"] = np.nan + + command_df.loc[ + command_df[StatusField.JOB_STATUS].isna(), + StatusField.JOB_STATUS.value, + ] = StatusOption.NOT_SUBMITTED.value + + command_df = _add_elapsed_time(command_df) + + command_df.index.name = index_name + command_df = command_df[output_cols] + return command_df
+ + +
+[docs] + def reload(self): + """Re-load the data from disk.""" + self.data = _load(self._fpath)
+ + +
+[docs] + def dump(self): + """Dump status json w/ backup file in case process gets killed.""" + + self._fpath.parent.mkdir(parents=True, exist_ok=True) + + backup = self._fpath.name.replace(".json", "_backup.json") + backup = self._fpath.parent / backup + if self._fpath.exists(): + shutil.copy(self._fpath, backup) + + with open(self._fpath, "w") as status: + json.dump(self.data, status, indent=4, separators=(",", ": ")) + + backup.unlink(missing_ok=True)
+ + +
+[docs] + def update_from_all_job_files(self, check_hardware=False, purge=True): + """Update status from all single-job job status files. + + This method loads all single-job status files in the target + directory and updates the `Status` object with the single-job + statuses. + + Parameters + ---------- + check_hardware : bool, optional + Option to check hardware status for job failures for jobs + with a "running" status. This is useful because the + "running" status may not be correctly updated if the job + terminates abnormally on the HPC. By default, `False`. + purge : bool, optional + Option to purge the individual status files. + By default, `True`. + + Returns + ------- + `Status` + This instance of `Status` with updated job properties. + """ + file_pattern = JOB_STATUS_FILE.format("*") + for file_ in Path(self.dir).glob(file_pattern): + status = _safe_load(file_, purge=purge) + self.data = recursively_update_dict(self.data, status) + + monitor_pid_file = Path(self.dir) / MONITOR_PID_FILE + if monitor_pid_file.exists(): + monitor_pid_info = _safe_load(monitor_pid_file, purge=purge) + self.data.update(monitor_pid_info) + + if check_hardware: + self._update_from_hardware() + + if purge: + self.dump() + + return self
+ + + def _update_from_hardware(self): + """Check all job status against hardware status.""" + hardware_status_retriever = HardwareStatusRetriever() + for job_data in self._job_statuses(): + self._update_job_status_from_hardware( + job_data, hardware_status_retriever + ) + + def _job_statuses(self): + """Iterate over job status dicts. Ignore other info in self.data""" + for status in self.values(): + try: + yield from _iter_job_status(status) + except AttributeError: + continue + + @staticmethod + def _update_job_status_from_hardware(job_data, hardware_status_retriever): + """Update job status to failed if it is processing but DNE on HPC.""" + + status = job_data.get( + StatusField.JOB_STATUS, StatusOption.NOT_SUBMITTED + ) + try: + if not StatusOption(status).is_processing: + return + except ValueError: + pass + + job_id = job_data.get(StatusField.JOB_ID, None) + job_hardware = job_data.get(StatusField.HARDWARE, None) + + # get job status from hardware + current = hardware_status_retriever[job_id, job_hardware] + # No current status and job was not successful: failed! + if current is None: + job_data[StatusField.JOB_STATUS] = StatusOption.FAILED + +
+[docs] + def update_job_status( + self, command, job_name, hardware_status_retriever=None + ): + """Update single-job job status from single-job job status file. + + If the status for a given command/job name combination is not + found, the status object remains unchanged. + + Parameters + ---------- + command : str + CLI command that the job belongs to. + job_name : str + Unique job name identification. + hardware_status_retriever : `HardwareStatusRetriever`, optional + Hardware status retriever. By default, `None`, which creates + an instance internally. + """ + hardware_status_retriever = ( + hardware_status_retriever or HardwareStatusRetriever() + ) + # look for job completion file. + current = _load_job_file(self.dir, job_name) + + # Update status data dict and file if job file was found + if current is not None: + self.data = recursively_update_dict(self.data, current) + + # check job status via hardware if job file not found. + elif command in self.data: + # job exists + if job_name in self.data[command]: + self._update_job_status_from_hardware( + self.data[command][job_name], hardware_status_retriever + ) + # job does not yet exist + else: + self.data[command][job_name] = { + StatusField.JOB_STATUS: StatusOption.NOT_SUBMITTED + } + + self.dump()
+ + + def _retrieve_job_status( + self, command, job_name, hardware_status_retriever + ): + """Update and retrieve job status.""" + if job_name.endswith(".h5"): + job_name = job_name.replace(".h5", "") + + self.update_job_status(command, job_name, hardware_status_retriever) + + try: + job_data = self[command][job_name] + except KeyError: + return None + + return job_data.get(StatusField.JOB_STATUS) + +
+[docs] + @staticmethod + def record_monitor_pid(status_dir, pid): + """Make a json file recording the PID of the monitor process. + + Parameters + ---------- + status_dir : path-like + Directory to put json status file. + pid : int + PID of the monitoring process. + """ + pid = {StatusField.MONITOR_PID: pid} + fpath = Path(status_dir) / MONITOR_PID_FILE + with open(fpath, "w") as pid_file: + json.dump( + pid, + pid_file, + sort_keys=True, + indent=4, + separators=(",", ": "), + )
+ + +
+[docs] + @staticmethod + def make_single_job_file(status_dir, command, job_name, attrs): + """Make a json file recording the status of a single job. + + This method should primarily be used by HPC nodes to mark the + status of individual jobs. + + Parameters + ---------- + status_dir : path-like + Directory to put json status file. + command : str + CLI command that the job belongs to. + job_name : str + Unique job name identification. + attrs : dict + Dictionary of job attributes that represent the job status + attributes. + """ + if job_name.endswith(".h5"): + job_name = job_name.replace(".h5", "") + + status = {command: {job_name: attrs}} + fpath = Path(status_dir) / JOB_STATUS_FILE.format(job_name) + with open(fpath, "w") as job_status: + json.dump( + status, + job_status, + sort_keys=True, + indent=4, + separators=(",", ": "), + )
+ + +
+[docs] + @classmethod + def mark_job_as_submitted( + cls, status_dir, command, job_name, replace=False, job_attrs=None + ): + """Mark a job in the status json as "submitted". + + Status json does not have to exist - it is created if missing. + If it exists, the job will only be updated if it does not + exist (i.e. not submitted), unless replace is set to `True`. + + Parameters + ---------- + status_dir : path-like + Directory containing json status file. + command : str + CLI command that the job belongs to. + job_name : str + Unique job name identification. + replace : bool, optional + Flag to force replacement of pre-existing job status. + By default, `False`. + job_attrs : dict, optional + Job attributes. Should include 'job_id' if running on HPC. + By default, `None`. + """ + if job_name.endswith(".h5"): + job_name = job_name.replace(".h5", "") + + obj = cls(status_dir) + + job_attrs = job_attrs or {} + hardware = job_attrs.get(StatusField.HARDWARE) + try: + job_on_hpc = HardwareOption(hardware).is_hpc + except ValueError: + job_on_hpc = False + + if job_on_hpc and StatusField.JOB_ID not in job_attrs: + msg = ( + f'Key "job_id" should be in kwargs for {job_name!r} if ' + f"adding job from an HPC node." + ) + warn(msg, gapsWarning) + + exists = obj.job_exists(status_dir, job_name, command=command) + + if exists and not replace: + return + + job_status = job_attrs.get(StatusField.JOB_STATUS) + if job_status != StatusOption.SUBMITTED: + if job_status is not None: + msg = ( + f"Attempting to mark a job as submitted but included a " + f"{StatusField.JOB_STATUS} value of {job_status!r} in the " + f"job_attrs dictionary! Setting the job status to " + f"{StatusOption.SUBMITTED!r} before writing." + ) + warn(msg, gapsWarning) + job_attrs[StatusField.JOB_STATUS] = StatusOption.SUBMITTED + + if command not in obj.data: + obj.data[command] = {job_name: job_attrs} + else: + obj.data[command][job_name] = job_attrs + + obj.dump()
+ + +
+[docs] + @classmethod + def job_exists(cls, status_dir, job_name, command=None): + """Check whether a job exists and return a bool. + + This method will return `True` if the job name is found as a + key in the dictionary under the `command` keyword, or any + command if a `None` value is passed. + + Parameters + ---------- + status_dir : str + Directory containing json status file. + job_name : str + Unique job name identification. + command : str, optional + CLI command that the job belongs to. By default, `None`, + which checks all commands for the job name. + + Returns + ------- + exists : bool + `True` if the job exists in the status json. + """ + if job_name.endswith(".h5"): + job_name = job_name.replace(".h5", "") + + obj = cls(status_dir).update_from_all_job_files(purge=False) + if not obj.data: + return False + if command is not None: + jobs = [obj.data.get(command)] + else: + jobs = obj.data.values() + + for job in jobs: + if not job: + continue + for name in job.keys(): + if name == job_name: + return True + + return False
+ + +
+[docs] + @classmethod + def retrieve_job_status( + cls, + status_dir, + command, + job_name, + subprocess_manager=None, + ): + """Update and retrieve job status. + + Parameters + ---------- + status_dir : str + Directory containing json status file. + command : str + CLI command that the job belongs to. + job_name : str + Unique job name identification. + subprocess_manager : None | SLURM, optional + Optional initialized subprocess manager to use to check job + statuses. This can be input with cached queue data to avoid + constantly querying the HPC. + + Returns + ------- + status : str | None + Status string or `None` if job/command not found. + """ + hsr = HardwareStatusRetriever(subprocess_manager) + return cls(status_dir)._retrieve_job_status(command, job_name, hsr)
+ + +
+[docs] + @classmethod + def parse_command_status( + cls, + status_dir, + command, + key=StatusField.OUT_FILE, + ): + """Parse key from job status(es) from the given command. + + This command DOES NOT check the HPC queue for jobs and therefore + DOES NOT update the status of previously running jobs that have + errored out of the HPC queue. + + Parameters + ---------- + status_dir : path-like + Directory containing the status file to parse. + command : str + Target CLI command to parse. + key : StatusField | str, optional + Parsing target of previous command. By default, + `StatusField.OUT_FILE`. + + Returns + ------- + list + Arguments parsed from the status file in status_dir from + the input command. This list is empty if the `key` is not + found in the job status, or if the command does not exist in + status. + """ + status = cls(status_dir).update_from_all_job_files(purge=False) + command_status = status.get(command, {}) + command_status.pop(StatusField.PIPELINE_INDEX, None) + return _get_attr_flat_list(command_status, key=key)
+
+ + + +
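A sketch of the typical single-job status round trip using the Status class above; the directory, command, and job names are illustrative.

from gaps.status import Status, StatusField, StatusOption

# 1) The submitting process records the job as "submitted"
Status.mark_job_as_submitted(
    "./my_project",
    command="run",
    job_name="run_job_0",
    job_attrs={StatusField.HARDWARE: "local"},
)

# 2) The running process (e.g. an HPC node) writes a single-job status file
Status.make_single_job_file(
    "./my_project",
    command="run",
    job_name="run_job_0",
    attrs={
        StatusField.JOB_STATUS: StatusOption.SUCCESSFUL,
        StatusField.OUT_FILE: "./my_project/run_job_0.h5",
    },
)

# 3) Anyone can collect the per-job files and summarize them as a DataFrame
status = Status("./my_project").update_from_all_job_files(purge=False)
print(status.as_df())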
+[docs] +class StatusUpdates: + """Context manager to track run function progress. + + When this context is entered, a status file is written for the given + command/job combination, with the given job attributes. The job + status is set to "running", and the start time is recorded. When the + context is exited, another status file is written with the end time + and total runtime values added. The status is also set to + "successful", unless an uncaught exception was raised during the + function runtime, in which case the status is set to "failed". If + the `out_file` attribute of this context manager is set before the + context is exited, that value is also written to the status file. + """ + + def __init__(self, directory, command, job_name, job_attrs): + """Initialize `StatusUpdates`. + + Parameters + ---------- + directory : path-like + Directory to write status files to. + command : str + Name of the command being run. + job_name : str + Name of the job being run. + job_attrs : dict + A dictionary containing job attributes that should be + written to the status file. + """ + self.directory = directory + self.command = command + self.job_name = job_name + self.job_attrs = deepcopy(job_attrs) + self.start_time = None + self.out_file = None + + def __enter__(self): + logger.debug( + "Received job attributes: %s", + pprint.pformat(self.job_attrs, indent=4), + ) + + self.start_time = dt.datetime.now() + self.job_attrs.update( + { + StatusField.JOB_STATUS: StatusOption.RUNNING, + StatusField.TIME_START: self.start_time.strftime(DT_FMT), + } + ) + Status.make_single_job_file( + self.directory, + command=self.command, + job_name=self.job_name, + attrs=self.job_attrs, + ) + return self + + def __exit__(self, exc_type, exc, traceback): + end_time = dt.datetime.now() + time_elapsed_s = (end_time - self.start_time).total_seconds() + time_elapsed = _elapsed_time_as_str(time_elapsed_s) + self.job_attrs.update( + { + StatusField.TIME_END: end_time.strftime(DT_FMT), + StatusField.TOTAL_RUNTIME: time_elapsed, + StatusField.RUNTIME_SECONDS: time_elapsed_s, + } + ) + if exc is None: + self.job_attrs.update( + { + StatusField.OUT_FILE: self.out_file, + StatusField.JOB_STATUS: StatusOption.SUCCESSFUL, + } + ) + logger.info( + "Command %r complete. Time elapsed: %s. " + "Target output file: %r", + self.command, + time_elapsed, + self.out_file, + ) + else: + self.job_attrs.update( + {StatusField.JOB_STATUS: StatusOption.FAILED} + ) + logger.info("Command %r failed in %s", self.command, time_elapsed) + + Status.make_single_job_file( + self.directory, + command=self.command, + job_name=self.job_name, + attrs=self.job_attrs, + )
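A sketch of how a run function might use this context manager; the paths and names are illustrative.

from pathlib import Path

from gaps.status import StatusUpdates

status_dir = Path("./my_project")
status_dir.mkdir(exist_ok=True)

job_attrs = {"hardware": "local", "qos": "normal"}
with StatusUpdates(status_dir, "run", "run_job_1", job_attrs) as updates:
    # ... do the actual work for this job and write its output file ...
    updates.out_file = str(status_dir / "run_job_1.h5")
# On exit the single-job status file records "successful" (or "failed" if an
# exception propagated) along with the start/end times and total runtime.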
+ + + +def _elapsed_time_as_str(seconds_elapsed): + """Format elapsed time into human readable string""" + days, seconds = divmod(int(seconds_elapsed), 24 * 3600) + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + time_str = f"{hours:d}:{minutes:02d}:{seconds:02d}" + if days: + time_str = f"{days:,d} day{'s' if abs(days) != 1 else ''}, {time_str}" + return time_str + + +def _add_elapsed_time(status_df): + """Add elapsed time to status DataFrame""" + has_start_time = ~status_df[StatusField.TIME_START].isna() + has_no_end_time = status_df[StatusField.TIME_END].isna() + has_not_failed = status_df[StatusField.JOB_STATUS] != StatusOption.FAILED + mask = has_start_time & (has_no_end_time & has_not_failed) + + start_times = status_df.loc[mask, StatusField.TIME_START] + start_times = pd.to_datetime(start_times, format=DT_FMT) + elapsed_times = dt.datetime.now() - start_times + elapsed_times = elapsed_times.apply(lambda dt: dt.total_seconds()) + status_df.loc[mask, StatusField.RUNTIME_SECONDS] = elapsed_times + elapsed_times = elapsed_times.apply(_elapsed_time_as_str) + elapsed_times = elapsed_times.apply(lambda time_str: f"{time_str} (r)") + status_df.loc[mask, StatusField.TOTAL_RUNTIME] = elapsed_times + return status_df + + +def _load(fpath): + """Load status json.""" + if fpath.is_file(): + return safe_json_load(fpath.as_posix()) + return {} + + +def _load_job_file(status_dir, job_name, purge=True): + """Load a single-job job status file in the target status_dir.""" + status_dir = Path(status_dir) + status_fname = status_dir / JOB_STATUS_FILE.format(job_name) + if status_fname not in status_dir.glob("*"): + return None + return _safe_load(status_fname, purge=purge) + + +def _safe_load(file_path, purge=True): + """Safe load json file and purge if needed.""" + # wait one second to make sure file is finished being written + time.sleep(0.01) + status = safe_json_load(file_path.as_posix()) + if purge: + file_path.unlink() + return status + + +def _get_attr_flat_list(inp, key=StatusField.JOB_ID): + """Get all job attribute values from the status data dict.""" + + out = [] + if not isinstance(inp, abc.Mapping): + return out + + if key in inp: + out = _combine_iterables_and_numerics(out, inp[key]) + else: + for val in inp.values(): + temp = _get_attr_flat_list(val, key=key) + out = _combine_iterables_and_numerics(out, temp) + + return out + + +def _combine_iterables_and_numerics(iterable, new_vals): + """Combine new_vals into the "iterable" list""" + try: + return iterable + new_vals + except TypeError: + pass + + return iterable + [new_vals] + + +def _validate_hardware(hardware): + """Verify that the selected hardware is a valid option.""" + try: + return HardwareOption(hardware) + except ValueError as err: + msg = ( + f"Requested hardware ({hardware!r}) not recognized! " + f"Available options are: {HardwareOption.members_as_str()}." + ) + raise gapsKeyError(msg) from err + + +def _iter_job_status(status): + """Iterate over job status dictionary.""" + for job_status in status.values(): + if not isinstance(job_status, dict): + continue + yield job_status +
diff --git a/_modules/gaps/utilities.html b/_modules/gaps/utilities.html
new file mode 100644
index 00000000..54f3d890
--- /dev/null
+++ b/_modules/gaps/utilities.html
@@ -0,0 +1,578 @@
+gaps.utilities — gaps 0.4.2 documentation

Source code for gaps.utilities

+# -*- coding: utf-8 -*-
+"""
+GAPs utilities.
+"""
+import sys
+import copy
+import logging
+import collections
+from enum import Enum
+from pathlib import Path
+
+from gaps.exceptions import gapsValueError
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class CaseInsensitiveEnum(str, Enum): + """A string enum that is case insensitive.""" + + def __new__(cls, value): + """Create new enum member.""" + + value = value.lower().strip() + obj = str.__new__(cls, value) + obj._value_ = value + obj = cls._new_post_hook(obj, value) + return obj + + # pylint: disable=inconsistent-return-statements + @classmethod + def _missing_(cls, value): + """Convert value to lowercase before lookup.""" + if value is None: + return + value = value.lower().strip() + for member in cls: + if member.value == value: + return member + + # pylint: disable=unused-argument + @classmethod + def _new_post_hook(cls, obj, value): + """Hook for post-processing after __new__""" + return obj + +
+[docs] + @classmethod + def members_as_str(cls): + """Set of enum members as strings.""" + return {member.value for member in cls}
+
+ + + +
+[docs] +def project_points_from_container_or_slice(project_points): + """Parse project point input into a list of GIDs. + + Parameters + ---------- + project_points : numeric | container + A number or container of numbers that holds GID values. If a + mapping (e.g. dict, pd.DataFrame, etc), a "gid" must map to the + desired values. + + Returns + ------- + list + A list of integer GID values. + """ + try: + project_points = project_points["gid"] + except (KeyError, TypeError, IndexError): + pass + + try: + project_points = project_points.values + except AttributeError: + pass + + try: + project_points = _slice_to_list(project_points) + except AttributeError: + pass + + try: + return [int(g) for g in project_points] + except TypeError: + return [int(g) for g in [project_points]]
+ + + +def _slice_to_list(inputs_slice): + """Convert a slice to a list of values.""" + start = inputs_slice.start or 0 + end = inputs_slice.stop + if end is None: + raise gapsValueError("slice must be bounded!") + step = inputs_slice.step or 1 + return list(range(start, end, step)) + + +
+[docs] +def recursively_update_dict(existing, new): + """Update a dictionary recursively. + + Parameters + ---------- + existing : dict + Existing dictionary to update. Dictionary is copied before + recursive update is applied. + new : dict + New dictionary with data to add to `existing`. + + Returns + ------- + dict + Existing dictionary with data updated from new dictionary. + """ + + existing = copy.deepcopy(existing) + + for key, val in new.items(): + if isinstance(val, collections.abc.Mapping): + existing[key] = recursively_update_dict(existing.get(key, {}), val) + else: + existing[key] = val + return existing
+ + + +
+[docs]
+def resolve_path(path, base_dir):
+    """Resolve a file path represented by the input string.
+
+    This function resolves the input string if it resembles a path.
+    Specifically, the string will be resolved if it starts with
+    "``./``" or "``..``", or if it contains either "``./``" or
+    "``..``" somewhere in the string body. Otherwise, the string
+    is returned unchanged, so this function *is* safe to call on any
+    string, even ones that do not resemble a path.
+
+    This method delegates the "resolving" logic to
+    :meth:`pathlib.Path.resolve`. This means the path is made
+    absolute, symlinks are resolved, and "``..``" components are
+    eliminated. If the ``path`` input starts with "``./``" or
+    "``..``", it is assumed to be relative to the config directory,
+    *not* the run directory.
+
+    Parameters
+    ----------
+    path : str
+        Input file path.
+    base_dir : path-like
+        Base path to directory from which to resolve path string
+        (typically current directory).
+
+    Returns
+    -------
+    str
+        The resolved path.
+    """
+    base_dir = Path(base_dir)
+
+    if path.startswith("./"):
+        path = base_dir / Path(path[2:])
+    elif path.startswith(".."):
+        path = base_dir / Path(path)
+    elif "./" in path:  # this covers both './' and '../'
+        path = Path(path)
+
+    try:
+        path = path.expanduser().resolve().as_posix()
+    except AttributeError:  # `path` is still a `str`
+        pass
+
+    return path
+ + + +def _is_sphinx_build(): + """``True`` if sphinx is in system modules, else ``False``""" + return "sphinx" in sys.modules +
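
A few usage examples for the public helpers above, assuming they are imported
from ``gaps.utilities`` as shown on this page; the GID containers, dictionaries,
and the ``Hardware`` subclass are illustrative only:

    from gaps.utilities import (
        CaseInsensitiveEnum,
        project_points_from_container_or_slice,
        recursively_update_dict,
        resolve_path,
    )

    # Slices, mappings with a "gid" key, and plain numbers all become GID lists
    assert project_points_from_container_or_slice(slice(0, 10, 2)) == [0, 2, 4, 6, 8]
    assert project_points_from_container_or_slice({"gid": [1, 2, 3]}) == [1, 2, 3]
    assert project_points_from_container_or_slice(7) == [7]

    # Nested dictionaries are merged rather than overwritten wholesale
    merged = recursively_update_dict({"a": {"x": 1}}, {"a": {"y": 2}})
    assert merged == {"a": {"x": 1, "y": 2}}

    # Strings that do not resemble a path pass through unchanged
    assert resolve_path("not_a_path", base_dir=".") == "not_a_path"

    class Hardware(CaseInsensitiveEnum):  # hypothetical subclass
        LOCAL = "local"

    assert Hardware(" LOCAL ") is Hardware.LOCAL  # lower-cased and stripped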
diff --git a/_modules/gaps/warnings.html b/_modules/gaps/warnings.html
new file mode 100644
index 00000000..5f876acf
--- /dev/null
+++ b/_modules/gaps/warnings.html
@@ -0,0 +1,430 @@
+gaps.warnings — gaps 0.4.2 documentation

Source code for gaps.warnings

+# -*- coding: utf-8 -*-
+# pylint: disable=invalid-name
+"""Custom Warning for GAPs. """
+import logging
+
+logger = logging.getLogger("gaps")
+
+
+
+[docs] +class gapsWarning(UserWarning): + """Generic gaps Warning.""" + + def __init__(self, *args, **kwargs): + """Init warning and broadcast message to logger.""" + super().__init__(*args, **kwargs) + if args: + logger.warning(str(args[0]), stacklevel=2)
+ + + +
+[docs] +class gapsCollectionWarning(gapsWarning): + """gaps Collection warning."""
+ + + +
+[docs] +class gapsHPCWarning(gapsWarning): + """gaps HPC warning."""
+ + + +
+[docs] +class gapsDeprecationWarning(gapsWarning, DeprecationWarning): + """gaps deprecation warning."""
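
Because ``gapsWarning`` logs its message on construction, issuing one of these
warnings both emits a Python warning and writes to the "gaps" logger; a small
illustration (the messages are placeholders):

    import warnings

    from gaps.warnings import gapsHPCWarning, gapsWarning

    warnings.warn(gapsWarning("something looks off"))
    warnings.warn(gapsHPCWarning("queue submission was retried"))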
diff --git a/_modules/index.html b/_modules/index.html
new file mode 100644
index 00000000..8ff137d6
--- /dev/null
+++ b/_modules/index.html
@@ -0,0 +1,410 @@
+Overview: module code — gaps 0.4.2 documentation
+ + + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.batch.BatchJob.rst.txt b/_sources/_autosummary/gaps.batch.BatchJob.rst.txt new file mode 100644 index 00000000..58c1986c --- /dev/null +++ b/_sources/_autosummary/gaps.batch.BatchJob.rst.txt @@ -0,0 +1,33 @@ +gaps.batch.BatchJob +=================== + +.. currentmodule:: gaps.batch + +.. autoclass:: BatchJob + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~BatchJob.cancel + ~BatchJob.delete + ~BatchJob.run + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~BatchJob.job_table + ~BatchJob.sub_dirs + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.batch.BatchSet.rst.txt b/_sources/_autosummary/gaps.batch.BatchSet.rst.txt new file mode 100644 index 00000000..0a7dfe80 --- /dev/null +++ b/_sources/_autosummary/gaps.batch.BatchSet.rst.txt @@ -0,0 +1,33 @@ +gaps.batch.BatchSet +=================== + +.. currentmodule:: gaps.batch + +.. autoclass:: BatchSet + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~BatchSet.count + ~BatchSet.index + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~BatchSet.arg_combo + ~BatchSet.file_set + ~BatchSet.tag + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.batch.rst.txt b/_sources/_autosummary/gaps.batch.rst.txt new file mode 100644 index 00000000..73242441 --- /dev/null +++ b/_sources/_autosummary/gaps.batch.rst.txt @@ -0,0 +1,32 @@ +gaps.batch +========== + +.. automodule:: gaps.batch + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + BatchJob + BatchSet + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.batch.batch_command.rst.txt b/_sources/_autosummary/gaps.cli.batch.batch_command.rst.txt new file mode 100644 index 00000000..13bfc3a0 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.batch.batch_command.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.batch.batch\_command +============================= + +.. currentmodule:: gaps.cli.batch + +.. autofunction:: batch_command \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.batch.rst.txt b/_sources/_autosummary/gaps.cli.batch.rst.txt new file mode 100644 index 00000000..8530e70d --- /dev/null +++ b/_sources/_autosummary/gaps.cli.batch.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.batch +============== + +.. automodule:: gaps.cli.batch + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + batch_command + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.cli.as_click_command.rst.txt b/_sources/_autosummary/gaps.cli.cli.as_click_command.rst.txt new file mode 100644 index 00000000..f80bd089 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.cli.as_click_command.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.cli.as\_click\_command +=============================== + +.. currentmodule:: gaps.cli.cli + +.. autofunction:: as_click_command \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.cli.make_cli.rst.txt b/_sources/_autosummary/gaps.cli.cli.make_cli.rst.txt new file mode 100644 index 00000000..d312f405 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.cli.make_cli.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.cli.make\_cli +====================== + +.. currentmodule:: gaps.cli.cli + +.. 
autofunction:: make_cli \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.cli.rst.txt b/_sources/_autosummary/gaps.cli.cli.rst.txt new file mode 100644 index 00000000..8df6f1ea --- /dev/null +++ b/_sources/_autosummary/gaps.cli.cli.rst.txt @@ -0,0 +1,31 @@ +gaps.cli.cli +============ + +.. automodule:: gaps.cli.cli + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + as_click_command + make_cli + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.collect.collect.rst.txt b/_sources/_autosummary/gaps.cli.collect.collect.rst.txt new file mode 100644 index 00000000..45c7a15d --- /dev/null +++ b/_sources/_autosummary/gaps.cli.collect.collect.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.collect.collect +======================== + +.. currentmodule:: gaps.cli.collect + +.. autofunction:: collect \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.collect.rst.txt b/_sources/_autosummary/gaps.cli.collect.rst.txt new file mode 100644 index 00000000..ded42618 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.collect.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.collect +================ + +.. automodule:: gaps.cli.collect + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + collect + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.command.AbstractBaseCLICommandConfiguration.rst.txt b/_sources/_autosummary/gaps.cli.command.AbstractBaseCLICommandConfiguration.rst.txt new file mode 100644 index 00000000..b6f9417c --- /dev/null +++ b/_sources/_autosummary/gaps.cli.command.AbstractBaseCLICommandConfiguration.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.command.AbstractBaseCLICommandConfiguration +==================================================== + +.. currentmodule:: gaps.cli.command + +.. autoclass:: AbstractBaseCLICommandConfiguration + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~AbstractBaseCLICommandConfiguration.documentation + ~AbstractBaseCLICommandConfiguration.is_split_spatially + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.command.CLICommandConfiguration.rst.txt b/_sources/_autosummary/gaps.cli.command.CLICommandConfiguration.rst.txt new file mode 100644 index 00000000..e607bbdb --- /dev/null +++ b/_sources/_autosummary/gaps.cli.command.CLICommandConfiguration.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.command.CLICommandConfiguration +======================================== + +.. currentmodule:: gaps.cli.command + +.. autofunction:: CLICommandConfiguration \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.command.CLICommandFromClass.rst.txt b/_sources/_autosummary/gaps.cli.command.CLICommandFromClass.rst.txt new file mode 100644 index 00000000..5d23ba03 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.command.CLICommandFromClass.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.command.CLICommandFromClass +==================================== + +.. currentmodule:: gaps.cli.command + +.. autoclass:: CLICommandFromClass + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~CLICommandFromClass.documentation + ~CLICommandFromClass.is_split_spatially + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.command.CLICommandFromFunction.rst.txt b/_sources/_autosummary/gaps.cli.command.CLICommandFromFunction.rst.txt new file mode 100644 index 00000000..579b64b3 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.command.CLICommandFromFunction.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.command.CLICommandFromFunction +======================================= + +.. currentmodule:: gaps.cli.command + +.. autoclass:: CLICommandFromFunction + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~CLICommandFromFunction.documentation + ~CLICommandFromFunction.is_split_spatially + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.command.rst.txt b/_sources/_autosummary/gaps.cli.command.rst.txt new file mode 100644 index 00000000..073bfe56 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.command.rst.txt @@ -0,0 +1,40 @@ +gaps.cli.command +================ + +.. automodule:: gaps.cli.command + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + CLICommandConfiguration + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + AbstractBaseCLICommandConfiguration + CLICommandFromClass + CLICommandFromFunction + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.config.as_script_str.rst.txt b/_sources/_autosummary/gaps.cli.config.as_script_str.rst.txt new file mode 100644 index 00000000..6b0b8c6e --- /dev/null +++ b/_sources/_autosummary/gaps.cli.config.as_script_str.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.config.as\_script\_str +=============================== + +.. currentmodule:: gaps.cli.config + +.. autofunction:: as_script_str \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.config.from_config.rst.txt b/_sources/_autosummary/gaps.cli.config.from_config.rst.txt new file mode 100644 index 00000000..d8761c77 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.config.from_config.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.config.from\_config +============================ + +.. currentmodule:: gaps.cli.config + +.. autofunction:: from_config \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.config.node_kwargs.rst.txt b/_sources/_autosummary/gaps.cli.config.node_kwargs.rst.txt new file mode 100644 index 00000000..ffbb6d1b --- /dev/null +++ b/_sources/_autosummary/gaps.cli.config.node_kwargs.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.config.node\_kwargs +============================ + +.. currentmodule:: gaps.cli.config + +.. autofunction:: node_kwargs \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.config.rst.txt b/_sources/_autosummary/gaps.cli.config.rst.txt new file mode 100644 index 00000000..c2d5efbd --- /dev/null +++ b/_sources/_autosummary/gaps.cli.config.rst.txt @@ -0,0 +1,33 @@ +gaps.cli.config +=============== + +.. automodule:: gaps.cli.config + + + + + + + + .. rubric:: Functions + + .. 
autosummary:: + :toctree: + + as_script_str + from_config + node_kwargs + run_with_status_updates + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.config.run_with_status_updates.rst.txt b/_sources/_autosummary/gaps.cli.config.run_with_status_updates.rst.txt new file mode 100644 index 00000000..de4fe445 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.config.run_with_status_updates.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.config.run\_with\_status\_updates +========================================== + +.. currentmodule:: gaps.cli.config + +.. autofunction:: run_with_status_updates \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.documentation.CommandDocumentation.rst.txt b/_sources/_autosummary/gaps.cli.documentation.CommandDocumentation.rst.txt new file mode 100644 index 00000000..7381aba6 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.documentation.CommandDocumentation.rst.txt @@ -0,0 +1,38 @@ +gaps.cli.documentation.CommandDocumentation +=========================================== + +.. currentmodule:: gaps.cli.documentation + +.. autoclass:: CommandDocumentation + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~CommandDocumentation.command_help + ~CommandDocumentation.config_help + ~CommandDocumentation.param_required + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~CommandDocumentation.REQUIRED_TAG + ~CommandDocumentation.default_exec_values + ~CommandDocumentation.exec_control_doc + ~CommandDocumentation.extended_summary + ~CommandDocumentation.parameter_help + ~CommandDocumentation.required_args + ~CommandDocumentation.template_config + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.documentation.rst.txt b/_sources/_autosummary/gaps.cli.documentation.rst.txt new file mode 100644 index 00000000..33e27be6 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.documentation.rst.txt @@ -0,0 +1,31 @@ +gaps.cli.documentation +====================== + +.. automodule:: gaps.cli.documentation + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + CommandDocumentation + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.execution.kickoff_job.rst.txt b/_sources/_autosummary/gaps.cli.execution.kickoff_job.rst.txt new file mode 100644 index 00000000..ed6bae61 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.execution.kickoff_job.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.execution.kickoff\_job +=============================== + +.. currentmodule:: gaps.cli.execution + +.. autofunction:: kickoff_job \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.execution.rst.txt b/_sources/_autosummary/gaps.cli.execution.rst.txt new file mode 100644 index 00000000..70e80996 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.execution.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.execution +================== + +.. automodule:: gaps.cli.execution + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + kickoff_job + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.pipeline.pipeline.rst.txt b/_sources/_autosummary/gaps.cli.pipeline.pipeline.rst.txt new file mode 100644 index 00000000..df923f61 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.pipeline.pipeline.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.pipeline.pipeline +========================== + +.. currentmodule:: gaps.cli.pipeline + +.. 
autofunction:: pipeline \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.pipeline.pipeline_command.rst.txt b/_sources/_autosummary/gaps.cli.pipeline.pipeline_command.rst.txt new file mode 100644 index 00000000..1584543e --- /dev/null +++ b/_sources/_autosummary/gaps.cli.pipeline.pipeline_command.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.pipeline.pipeline\_command +=================================== + +.. currentmodule:: gaps.cli.pipeline + +.. autofunction:: pipeline_command \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.pipeline.rst.txt b/_sources/_autosummary/gaps.cli.pipeline.rst.txt new file mode 100644 index 00000000..b003e72a --- /dev/null +++ b/_sources/_autosummary/gaps.cli.pipeline.rst.txt @@ -0,0 +1,32 @@ +gaps.cli.pipeline +================= + +.. automodule:: gaps.cli.pipeline + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + pipeline + pipeline_command + template_pipeline_config + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.pipeline.template_pipeline_config.rst.txt b/_sources/_autosummary/gaps.cli.pipeline.template_pipeline_config.rst.txt new file mode 100644 index 00000000..91dc7a55 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.pipeline.template_pipeline_config.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.pipeline.template\_pipeline\_config +============================================ + +.. currentmodule:: gaps.cli.pipeline + +.. autofunction:: template_pipeline_config \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.preprocessing.preprocess_collect_config.rst.txt b/_sources/_autosummary/gaps.cli.preprocessing.preprocess_collect_config.rst.txt new file mode 100644 index 00000000..70ba38e7 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.preprocessing.preprocess_collect_config.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.preprocessing.preprocess\_collect\_config +================================================== + +.. currentmodule:: gaps.cli.preprocessing + +.. autofunction:: preprocess_collect_config \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.preprocessing.rst.txt b/_sources/_autosummary/gaps.cli.preprocessing.rst.txt new file mode 100644 index 00000000..019f2184 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.preprocessing.rst.txt @@ -0,0 +1,31 @@ +gaps.cli.preprocessing +====================== + +.. automodule:: gaps.cli.preprocessing + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + preprocess_collect_config + split_project_points_into_ranges + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.preprocessing.split_project_points_into_ranges.rst.txt b/_sources/_autosummary/gaps.cli.preprocessing.split_project_points_into_ranges.rst.txt new file mode 100644 index 00000000..e67d73d4 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.preprocessing.split_project_points_into_ranges.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.preprocessing.split\_project\_points\_into\_ranges +=========================================================== + +.. currentmodule:: gaps.cli.preprocessing + +.. autofunction:: split_project_points_into_ranges \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.rst.txt b/_sources/_autosummary/gaps.cli.rst.txt new file mode 100644 index 00000000..2f3ee5b8 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.rst.txt @@ -0,0 +1,40 @@ +gaps.cli +======== + +.. automodule:: gaps.cli + + + + + + + + + + + + + + + + + + + +.. 
autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: + + gaps.cli.batch + gaps.cli.cli + gaps.cli.collect + gaps.cli.command + gaps.cli.config + gaps.cli.documentation + gaps.cli.execution + gaps.cli.pipeline + gaps.cli.preprocessing + gaps.cli.status + gaps.cli.templates + diff --git a/_sources/_autosummary/gaps.cli.status.main_monitor.rst.txt b/_sources/_autosummary/gaps.cli.status.main_monitor.rst.txt new file mode 100644 index 00000000..c39d944d --- /dev/null +++ b/_sources/_autosummary/gaps.cli.status.main_monitor.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.status.main\_monitor +============================= + +.. currentmodule:: gaps.cli.status + +.. autofunction:: main_monitor \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.status.rst.txt b/_sources/_autosummary/gaps.cli.status.rst.txt new file mode 100644 index 00000000..88e8d37a --- /dev/null +++ b/_sources/_autosummary/gaps.cli.status.rst.txt @@ -0,0 +1,31 @@ +gaps.cli.status +=============== + +.. automodule:: gaps.cli.status + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + main_monitor + status_command + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.status.status_command.rst.txt b/_sources/_autosummary/gaps.cli.status.status_command.rst.txt new file mode 100644 index 00000000..670adc07 --- /dev/null +++ b/_sources/_autosummary/gaps.cli.status.status_command.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.status.status\_command +=============================== + +.. currentmodule:: gaps.cli.status + +.. autofunction:: status_command \ No newline at end of file diff --git a/_sources/_autosummary/gaps.cli.templates.rst.txt b/_sources/_autosummary/gaps.cli.templates.rst.txt new file mode 100644 index 00000000..e564f0fa --- /dev/null +++ b/_sources/_autosummary/gaps.cli.templates.rst.txt @@ -0,0 +1,30 @@ +gaps.cli.templates +================== + +.. automodule:: gaps.cli.templates + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + template_command + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.cli.templates.template_command.rst.txt b/_sources/_autosummary/gaps.cli.templates.template_command.rst.txt new file mode 100644 index 00000000..bd66bbef --- /dev/null +++ b/_sources/_autosummary/gaps.cli.templates.template_command.rst.txt @@ -0,0 +1,6 @@ +gaps.cli.templates.template\_command +==================================== + +.. currentmodule:: gaps.cli.templates + +.. autofunction:: template_command \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.Collector.rst.txt b/_sources/_autosummary/gaps.collection.Collector.rst.txt new file mode 100644 index 00000000..d4ff8130 --- /dev/null +++ b/_sources/_autosummary/gaps.collection.Collector.rst.txt @@ -0,0 +1,37 @@ +gaps.collection.Collector +========================= + +.. currentmodule:: gaps.collection + +.. autoclass:: Collector + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Collector.add_dataset + ~Collector.collect + ~Collector.combine_meta + ~Collector.combine_time_index + ~Collector.get_dataset_shape + ~Collector.move_chunks + ~Collector.purge_chunks + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~Collector.gids + ~Collector.h5_files + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.DatasetCollector.rst.txt b/_sources/_autosummary/gaps.collection.DatasetCollector.rst.txt new file mode 100644 index 00000000..3900b51c --- /dev/null +++ b/_sources/_autosummary/gaps.collection.DatasetCollector.rst.txt @@ -0,0 +1,31 @@ +gaps.collection.DatasetCollector +================================ + +.. currentmodule:: gaps.collection + +.. autoclass:: DatasetCollector + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~DatasetCollector.collect_dataset + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~DatasetCollector.duplicate_gids + ~DatasetCollector.gids + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.find_h5_files.rst.txt b/_sources/_autosummary/gaps.collection.find_h5_files.rst.txt new file mode 100644 index 00000000..5aaa80a8 --- /dev/null +++ b/_sources/_autosummary/gaps.collection.find_h5_files.rst.txt @@ -0,0 +1,6 @@ +gaps.collection.find\_h5\_files +=============================== + +.. currentmodule:: gaps.collection + +.. autofunction:: find_h5_files \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.parse_gids_from_files.rst.txt b/_sources/_autosummary/gaps.collection.parse_gids_from_files.rst.txt new file mode 100644 index 00000000..b265f13e --- /dev/null +++ b/_sources/_autosummary/gaps.collection.parse_gids_from_files.rst.txt @@ -0,0 +1,6 @@ +gaps.collection.parse\_gids\_from\_files +======================================== + +.. currentmodule:: gaps.collection + +.. autofunction:: parse_gids_from_files \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.parse_meta.rst.txt b/_sources/_autosummary/gaps.collection.parse_meta.rst.txt new file mode 100644 index 00000000..f4f34141 --- /dev/null +++ b/_sources/_autosummary/gaps.collection.parse_meta.rst.txt @@ -0,0 +1,6 @@ +gaps.collection.parse\_meta +=========================== + +.. currentmodule:: gaps.collection + +.. autofunction:: parse_meta \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.parse_project_points.rst.txt b/_sources/_autosummary/gaps.collection.parse_project_points.rst.txt new file mode 100644 index 00000000..c4d79e4d --- /dev/null +++ b/_sources/_autosummary/gaps.collection.parse_project_points.rst.txt @@ -0,0 +1,6 @@ +gaps.collection.parse\_project\_points +====================================== + +.. currentmodule:: gaps.collection + +.. autofunction:: parse_project_points \ No newline at end of file diff --git a/_sources/_autosummary/gaps.collection.rst.txt b/_sources/_autosummary/gaps.collection.rst.txt new file mode 100644 index 00000000..29ca7e75 --- /dev/null +++ b/_sources/_autosummary/gaps.collection.rst.txt @@ -0,0 +1,42 @@ +gaps.collection +=============== + +.. automodule:: gaps.collection + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + find_h5_files + parse_gids_from_files + parse_meta + parse_project_points + + + + + + .. rubric:: Classes + + .. 
autosummary:: + :toctree: + :template: custom-class-template.rst + + Collector + DatasetCollector + + + + + + + + + diff --git a/_sources/_autosummary/gaps.config.ConfigType.rst.txt b/_sources/_autosummary/gaps.config.ConfigType.rst.txt new file mode 100644 index 00000000..f60a0f0d --- /dev/null +++ b/_sources/_autosummary/gaps.config.ConfigType.rst.txt @@ -0,0 +1,28 @@ +gaps.config.ConfigType +====================== + +.. currentmodule:: gaps.config + +.. autoclass:: ConfigType + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ConfigType.JSON + ~ConfigType.JSON5 + ~ConfigType.YAML + ~ConfigType.YML + ~ConfigType.TOML + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.Handler.rst.txt b/_sources/_autosummary/gaps.config.Handler.rst.txt new file mode 100644 index 00000000..ef97d2df --- /dev/null +++ b/_sources/_autosummary/gaps.config.Handler.rst.txt @@ -0,0 +1,34 @@ +gaps.config.Handler +=================== + +.. currentmodule:: gaps.config + +.. autoclass:: Handler + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Handler.dump + ~Handler.dumps + ~Handler.load + ~Handler.loads + ~Handler.write + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Handler.FILE_EXTENSION + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.JSON5Handler.rst.txt b/_sources/_autosummary/gaps.config.JSON5Handler.rst.txt new file mode 100644 index 00000000..106b7fcb --- /dev/null +++ b/_sources/_autosummary/gaps.config.JSON5Handler.rst.txt @@ -0,0 +1,34 @@ +gaps.config.JSON5Handler +======================== + +.. currentmodule:: gaps.config + +.. autoclass:: JSON5Handler + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~JSON5Handler.dump + ~JSON5Handler.dumps + ~JSON5Handler.load + ~JSON5Handler.loads + ~JSON5Handler.write + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~JSON5Handler.FILE_EXTENSION + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.JSONHandler.rst.txt b/_sources/_autosummary/gaps.config.JSONHandler.rst.txt new file mode 100644 index 00000000..8c083c74 --- /dev/null +++ b/_sources/_autosummary/gaps.config.JSONHandler.rst.txt @@ -0,0 +1,34 @@ +gaps.config.JSONHandler +======================= + +.. currentmodule:: gaps.config + +.. autoclass:: JSONHandler + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~JSONHandler.dump + ~JSONHandler.dumps + ~JSONHandler.load + ~JSONHandler.loads + ~JSONHandler.write + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~JSONHandler.FILE_EXTENSION + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.TOMLHandler.rst.txt b/_sources/_autosummary/gaps.config.TOMLHandler.rst.txt new file mode 100644 index 00000000..95cfc032 --- /dev/null +++ b/_sources/_autosummary/gaps.config.TOMLHandler.rst.txt @@ -0,0 +1,34 @@ +gaps.config.TOMLHandler +======================= + +.. currentmodule:: gaps.config + +.. autoclass:: TOMLHandler + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. 
autosummary:: + + ~TOMLHandler.dump + ~TOMLHandler.dumps + ~TOMLHandler.load + ~TOMLHandler.loads + ~TOMLHandler.write + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~TOMLHandler.FILE_EXTENSION + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.YAMLHandler.rst.txt b/_sources/_autosummary/gaps.config.YAMLHandler.rst.txt new file mode 100644 index 00000000..af85fc93 --- /dev/null +++ b/_sources/_autosummary/gaps.config.YAMLHandler.rst.txt @@ -0,0 +1,34 @@ +gaps.config.YAMLHandler +======================= + +.. currentmodule:: gaps.config + +.. autoclass:: YAMLHandler + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~YAMLHandler.dump + ~YAMLHandler.dumps + ~YAMLHandler.load + ~YAMLHandler.loads + ~YAMLHandler.write + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~YAMLHandler.FILE_EXTENSION + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.config_as_str_for_docstring.rst.txt b/_sources/_autosummary/gaps.config.config_as_str_for_docstring.rst.txt new file mode 100644 index 00000000..8f66489f --- /dev/null +++ b/_sources/_autosummary/gaps.config.config_as_str_for_docstring.rst.txt @@ -0,0 +1,6 @@ +gaps.config.config\_as\_str\_for\_docstring +=========================================== + +.. currentmodule:: gaps.config + +.. autofunction:: config_as_str_for_docstring \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.init_logging_from_config_file.rst.txt b/_sources/_autosummary/gaps.config.init_logging_from_config_file.rst.txt new file mode 100644 index 00000000..3c81ecd9 --- /dev/null +++ b/_sources/_autosummary/gaps.config.init_logging_from_config_file.rst.txt @@ -0,0 +1,6 @@ +gaps.config.init\_logging\_from\_config\_file +============================================= + +.. currentmodule:: gaps.config + +.. autofunction:: init_logging_from_config_file \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.load_config.rst.txt b/_sources/_autosummary/gaps.config.load_config.rst.txt new file mode 100644 index 00000000..2d1b8b02 --- /dev/null +++ b/_sources/_autosummary/gaps.config.load_config.rst.txt @@ -0,0 +1,6 @@ +gaps.config.load\_config +======================== + +.. currentmodule:: gaps.config + +.. autofunction:: load_config \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.resolve_all_paths.rst.txt b/_sources/_autosummary/gaps.config.resolve_all_paths.rst.txt new file mode 100644 index 00000000..f43df689 --- /dev/null +++ b/_sources/_autosummary/gaps.config.resolve_all_paths.rst.txt @@ -0,0 +1,6 @@ +gaps.config.resolve\_all\_paths +=============================== + +.. currentmodule:: gaps.config + +.. autofunction:: resolve_all_paths \ No newline at end of file diff --git a/_sources/_autosummary/gaps.config.rst.txt b/_sources/_autosummary/gaps.config.rst.txt new file mode 100644 index 00000000..8cf06152 --- /dev/null +++ b/_sources/_autosummary/gaps.config.rst.txt @@ -0,0 +1,53 @@ +gaps.config +=========== + +.. automodule:: gaps.config + + + + .. rubric:: Module attributes + + .. autosummary:: + :toctree: + + ConfigType + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + config_as_str_for_docstring + init_logging_from_config_file + load_config + resolve_all_paths + + + + + + .. rubric:: Classes + + .. 
autosummary:: + :toctree: + :template: custom-class-template.rst + + ConfigType + Handler + JSON5Handler + JSONHandler + TOMLHandler + YAMLHandler + + + + + + + + + diff --git a/_sources/_autosummary/gaps.exceptions.gapsConfigError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsConfigError.rst.txt new file mode 100644 index 00000000..714c44d8 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsConfigError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsConfigError +=============================== + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsConfigError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsError.rst.txt new file mode 100644 index 00000000..8b591eb8 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsError +========================= + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsExecutionError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsExecutionError.rst.txt new file mode 100644 index 00000000..6c3ca9cd --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsExecutionError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsExecutionError +================================== + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsExecutionError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsHPCError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsHPCError.rst.txt new file mode 100644 index 00000000..75213ea1 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsHPCError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsHPCError +============================ + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsHPCError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsIOError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsIOError.rst.txt new file mode 100644 index 00000000..63a6f839 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsIOError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsIOError +=========================== + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsIOError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsIndexError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsIndexError.rst.txt new file mode 100644 index 00000000..23952de8 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsIndexError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsIndexError +============================== + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsIndexError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsKeyError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsKeyError.rst.txt new file mode 100644 index 00000000..75d8f5f0 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsKeyError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsKeyError +============================ + +.. currentmodule:: gaps.exceptions + +.. 
autoexception:: gapsKeyError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsRuntimeError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsRuntimeError.rst.txt new file mode 100644 index 00000000..15e79c0f --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsRuntimeError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsRuntimeError +================================ + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsRuntimeError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsTypeError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsTypeError.rst.txt new file mode 100644 index 00000000..d81f7ea3 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsTypeError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsTypeError +============================= + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsTypeError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.gapsValueError.rst.txt b/_sources/_autosummary/gaps.exceptions.gapsValueError.rst.txt new file mode 100644 index 00000000..6de5e5e4 --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.gapsValueError.rst.txt @@ -0,0 +1,6 @@ +gaps.exceptions.gapsValueError +============================== + +.. currentmodule:: gaps.exceptions + +.. autoexception:: gapsValueError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.exceptions.rst.txt b/_sources/_autosummary/gaps.exceptions.rst.txt new file mode 100644 index 00000000..d1f9cfad --- /dev/null +++ b/_sources/_autosummary/gaps.exceptions.rst.txt @@ -0,0 +1,39 @@ +gaps.exceptions +=============== + +.. automodule:: gaps.exceptions + + + + + + + + + + + + + + + + .. rubric:: Exceptions + + .. autosummary:: + :toctree: + + gapsConfigError + gapsError + gapsExecutionError + gapsHPCError + gapsIOError + gapsIndexError + gapsKeyError + gapsRuntimeError + gapsTypeError + gapsValueError + + + + + diff --git a/_sources/_autosummary/gaps.hpc.COMMANDS.rst.txt b/_sources/_autosummary/gaps.hpc.COMMANDS.rst.txt new file mode 100644 index 00000000..563693ec --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.COMMANDS.rst.txt @@ -0,0 +1,32 @@ +gaps.hpc.COMMANDS +================= + +.. currentmodule:: gaps.hpc + +.. autoclass:: COMMANDS + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~COMMANDS.count + ~COMMANDS.index + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~COMMANDS.CANCEL + ~COMMANDS.SUBMIT + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.DEFAULT_STDOUT_PATH.rst.txt b/_sources/_autosummary/gaps.hpc.DEFAULT_STDOUT_PATH.rst.txt new file mode 100644 index 00000000..976de822 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.DEFAULT_STDOUT_PATH.rst.txt @@ -0,0 +1,6 @@ +gaps.hpc.DEFAULT\_STDOUT\_PATH +============================== + +.. currentmodule:: gaps.hpc + +.. autodata:: DEFAULT_STDOUT_PATH \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.HpcJobManager.rst.txt b/_sources/_autosummary/gaps.hpc.HpcJobManager.rst.txt new file mode 100644 index 00000000..2ff8bc98 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.HpcJobManager.rst.txt @@ -0,0 +1,43 @@ +gaps.hpc.HpcJobManager +====================== + +.. currentmodule:: gaps.hpc + +.. autoclass:: HpcJobManager + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. 
rubric:: Methods + + .. autosummary:: + + ~HpcJobManager.cancel + ~HpcJobManager.check_status_using_job_id + ~HpcJobManager.check_status_using_job_name + ~HpcJobManager.make_script_str + ~HpcJobManager.parse_queue_str + ~HpcJobManager.query_queue + ~HpcJobManager.reset_query_cache + ~HpcJobManager.submit + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~HpcJobManager.COLUMN_HEADERS + ~HpcJobManager.COMMANDS + ~HpcJobManager.MAX_NAME_LEN + ~HpcJobManager.Q_SUBMITTED_STATUS + ~HpcJobManager.SHELL_FILENAME_FMT + ~HpcJobManager.USER + ~HpcJobManager.queue + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.PBS.rst.txt b/_sources/_autosummary/gaps.hpc.PBS.rst.txt new file mode 100644 index 00000000..7842c1d8 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.PBS.rst.txt @@ -0,0 +1,43 @@ +gaps.hpc.PBS +============ + +.. currentmodule:: gaps.hpc + +.. autoclass:: PBS + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~PBS.cancel + ~PBS.check_status_using_job_id + ~PBS.check_status_using_job_name + ~PBS.make_script_str + ~PBS.parse_queue_str + ~PBS.query_queue + ~PBS.reset_query_cache + ~PBS.submit + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~PBS.COLUMN_HEADERS + ~PBS.COMMANDS + ~PBS.MAX_NAME_LEN + ~PBS.Q_SUBMITTED_STATUS + ~PBS.SHELL_FILENAME_FMT + ~PBS.USER + ~PBS.queue + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.Q_COLUMNS.rst.txt b/_sources/_autosummary/gaps.hpc.Q_COLUMNS.rst.txt new file mode 100644 index 00000000..013cd9bb --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.Q_COLUMNS.rst.txt @@ -0,0 +1,33 @@ +gaps.hpc.Q\_COLUMNS +=================== + +.. currentmodule:: gaps.hpc + +.. autoclass:: Q_COLUMNS + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Q_COLUMNS.count + ~Q_COLUMNS.index + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Q_COLUMNS.ID + ~Q_COLUMNS.NAME + ~Q_COLUMNS.STATUS + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.SLURM.rst.txt b/_sources/_autosummary/gaps.hpc.SLURM.rst.txt new file mode 100644 index 00000000..2abaf0f0 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.SLURM.rst.txt @@ -0,0 +1,43 @@ +gaps.hpc.SLURM +============== + +.. currentmodule:: gaps.hpc + +.. autoclass:: SLURM + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~SLURM.cancel + ~SLURM.check_status_using_job_id + ~SLURM.check_status_using_job_name + ~SLURM.make_script_str + ~SLURM.parse_queue_str + ~SLURM.query_queue + ~SLURM.reset_query_cache + ~SLURM.submit + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~SLURM.COLUMN_HEADERS + ~SLURM.COMMANDS + ~SLURM.MAX_NAME_LEN + ~SLURM.Q_SUBMITTED_STATUS + ~SLURM.SHELL_FILENAME_FMT + ~SLURM.USER + ~SLURM.queue + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.format_env.rst.txt b/_sources/_autosummary/gaps.hpc.format_env.rst.txt new file mode 100644 index 00000000..200709b8 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.format_env.rst.txt @@ -0,0 +1,6 @@ +gaps.hpc.format\_env +==================== + +.. currentmodule:: gaps.hpc + +.. 
autofunction:: format_env \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.format_walltime.rst.txt b/_sources/_autosummary/gaps.hpc.format_walltime.rst.txt new file mode 100644 index 00000000..68cc8d79 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.format_walltime.rst.txt @@ -0,0 +1,6 @@ +gaps.hpc.format\_walltime +========================= + +.. currentmodule:: gaps.hpc + +.. autofunction:: format_walltime \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.make_sh.rst.txt b/_sources/_autosummary/gaps.hpc.make_sh.rst.txt new file mode 100644 index 00000000..6fd3f6a2 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.make_sh.rst.txt @@ -0,0 +1,6 @@ +gaps.hpc.make\_sh +================= + +.. currentmodule:: gaps.hpc + +.. autofunction:: make_sh \ No newline at end of file diff --git a/_sources/_autosummary/gaps.hpc.rst.txt b/_sources/_autosummary/gaps.hpc.rst.txt new file mode 100644 index 00000000..45103721 --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.rst.txt @@ -0,0 +1,52 @@ +gaps.hpc +======== + +.. automodule:: gaps.hpc + + + + .. rubric:: Module attributes + + .. autosummary:: + :toctree: + + DEFAULT_STDOUT_PATH + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + format_env + format_walltime + make_sh + submit + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + COMMANDS + HpcJobManager + PBS + Q_COLUMNS + SLURM + + + + + + + + + diff --git a/_sources/_autosummary/gaps.hpc.submit.rst.txt b/_sources/_autosummary/gaps.hpc.submit.rst.txt new file mode 100644 index 00000000..0dd3e67c --- /dev/null +++ b/_sources/_autosummary/gaps.hpc.submit.rst.txt @@ -0,0 +1,6 @@ +gaps.hpc.submit +=============== + +.. currentmodule:: gaps.hpc + +.. autofunction:: submit \ No newline at end of file diff --git a/_sources/_autosummary/gaps.legacy.BatchJob.rst.txt b/_sources/_autosummary/gaps.legacy.BatchJob.rst.txt new file mode 100644 index 00000000..78bb6ff7 --- /dev/null +++ b/_sources/_autosummary/gaps.legacy.BatchJob.rst.txt @@ -0,0 +1,37 @@ +gaps.legacy.BatchJob +==================== + +.. currentmodule:: gaps.legacy + +.. autoclass:: BatchJob + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~BatchJob.cancel + ~BatchJob.cancel_all + ~BatchJob.delete + ~BatchJob.delete_all + ~BatchJob.run + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~BatchJob.PIPELINE_BACKGROUND_METHOD + ~BatchJob.PIPELINE_CLASS + ~BatchJob.job_table + ~BatchJob.sub_dirs + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.legacy.HardwareStatusRetriever.rst.txt b/_sources/_autosummary/gaps.legacy.HardwareStatusRetriever.rst.txt new file mode 100644 index 00000000..8ba39c83 --- /dev/null +++ b/_sources/_autosummary/gaps.legacy.HardwareStatusRetriever.rst.txt @@ -0,0 +1,23 @@ +gaps.legacy.HardwareStatusRetriever +=================================== + +.. currentmodule:: gaps.legacy + +.. autoclass:: HardwareStatusRetriever + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. 
autosummary:: + + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.legacy.Pipeline.rst.txt b/_sources/_autosummary/gaps.legacy.Pipeline.rst.txt new file mode 100644 index 00000000..94a138a5 --- /dev/null +++ b/_sources/_autosummary/gaps.legacy.Pipeline.rst.txt @@ -0,0 +1,34 @@ +gaps.legacy.Pipeline +==================== + +.. currentmodule:: gaps.legacy + +.. autoclass:: Pipeline + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Pipeline.cancel_all + ~Pipeline.run + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Pipeline.CMD_BASE + ~Pipeline.COMMANDS + ~Pipeline.name + ~Pipeline.status + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.legacy.PipelineError.rst.txt b/_sources/_autosummary/gaps.legacy.PipelineError.rst.txt new file mode 100644 index 00000000..8740d180 --- /dev/null +++ b/_sources/_autosummary/gaps.legacy.PipelineError.rst.txt @@ -0,0 +1,6 @@ +gaps.legacy.PipelineError +========================= + +.. currentmodule:: gaps.legacy + +.. autoexception:: PipelineError \ No newline at end of file diff --git a/_sources/_autosummary/gaps.legacy.Status.rst.txt b/_sources/_autosummary/gaps.legacy.Status.rst.txt new file mode 100644 index 00000000..7d054e4b --- /dev/null +++ b/_sources/_autosummary/gaps.legacy.Status.rst.txt @@ -0,0 +1,54 @@ +gaps.legacy.Status +================== + +.. currentmodule:: gaps.legacy + +.. autoclass:: Status + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Status.add_job + ~Status.as_df + ~Status.clear + ~Status.copy + ~Status.dump + ~Status.fromkeys + ~Status.get + ~Status.items + ~Status.job_exists + ~Status.keys + ~Status.make_job_file + ~Status.make_single_job_file + ~Status.mark_job_as_submitted + ~Status.parse_command_status + ~Status.pop + ~Status.popitem + ~Status.record_monitor_pid + ~Status.reload + ~Status.retrieve_job_status + ~Status.setdefault + ~Status.update + ~Status.update_from_all_job_files + ~Status.update_job_status + ~Status.values + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Status.job_hardware + ~Status.job_ids + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.legacy.rst.txt b/_sources/_autosummary/gaps.legacy.rst.txt new file mode 100644 index 00000000..f6d791df --- /dev/null +++ b/_sources/_autosummary/gaps.legacy.rst.txt @@ -0,0 +1,41 @@ +gaps.legacy +=========== + +.. automodule:: gaps.legacy + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + BatchJob + HardwareStatusRetriever + Pipeline + Status + + + + + + .. rubric:: Exceptions + + .. autosummary:: + :toctree: + + PipelineError + + + + + diff --git a/_sources/_autosummary/gaps.log.FORMAT.rst.txt b/_sources/_autosummary/gaps.log.FORMAT.rst.txt new file mode 100644 index 00000000..b43d5698 --- /dev/null +++ b/_sources/_autosummary/gaps.log.FORMAT.rst.txt @@ -0,0 +1,6 @@ +gaps.log.FORMAT +=============== + +.. currentmodule:: gaps.log + +.. autodata:: FORMAT \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.add_handlers.rst.txt b/_sources/_autosummary/gaps.log.add_handlers.rst.txt new file mode 100644 index 00000000..d7dfd8c0 --- /dev/null +++ b/_sources/_autosummary/gaps.log.add_handlers.rst.txt @@ -0,0 +1,6 @@ +gaps.log.add\_handlers +====================== + +.. 
currentmodule:: gaps.log + +.. autofunction:: add_handlers \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.init_logger.rst.txt b/_sources/_autosummary/gaps.log.init_logger.rst.txt new file mode 100644 index 00000000..2b42b6c3 --- /dev/null +++ b/_sources/_autosummary/gaps.log.init_logger.rst.txt @@ -0,0 +1,6 @@ +gaps.log.init\_logger +===================== + +.. currentmodule:: gaps.log + +.. autofunction:: init_logger \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.log_mem.rst.txt b/_sources/_autosummary/gaps.log.log_mem.rst.txt new file mode 100644 index 00000000..cfff1095 --- /dev/null +++ b/_sources/_autosummary/gaps.log.log_mem.rst.txt @@ -0,0 +1,6 @@ +gaps.log.log\_mem +================= + +.. currentmodule:: gaps.log + +.. autofunction:: log_mem \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.log_versions.rst.txt b/_sources/_autosummary/gaps.log.log_versions.rst.txt new file mode 100644 index 00000000..f08e4bde --- /dev/null +++ b/_sources/_autosummary/gaps.log.log_versions.rst.txt @@ -0,0 +1,6 @@ +gaps.log.log\_versions +====================== + +.. currentmodule:: gaps.log + +.. autofunction:: log_versions \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.make_handler.rst.txt b/_sources/_autosummary/gaps.log.make_handler.rst.txt new file mode 100644 index 00000000..cf8fa73a --- /dev/null +++ b/_sources/_autosummary/gaps.log.make_handler.rst.txt @@ -0,0 +1,6 @@ +gaps.log.make\_handler +====================== + +.. currentmodule:: gaps.log + +.. autofunction:: make_handler \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.make_log_file_handler.rst.txt b/_sources/_autosummary/gaps.log.make_log_file_handler.rst.txt new file mode 100644 index 00000000..2b74f692 --- /dev/null +++ b/_sources/_autosummary/gaps.log.make_log_file_handler.rst.txt @@ -0,0 +1,6 @@ +gaps.log.make\_log\_file\_handler +================================= + +.. currentmodule:: gaps.log + +.. autofunction:: make_log_file_handler \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.make_log_stream_handler.rst.txt b/_sources/_autosummary/gaps.log.make_log_stream_handler.rst.txt new file mode 100644 index 00000000..74d43383 --- /dev/null +++ b/_sources/_autosummary/gaps.log.make_log_stream_handler.rst.txt @@ -0,0 +1,6 @@ +gaps.log.make\_log\_stream\_handler +=================================== + +.. currentmodule:: gaps.log + +.. autofunction:: make_log_stream_handler \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.print_logging_info.rst.txt b/_sources/_autosummary/gaps.log.print_logging_info.rst.txt new file mode 100644 index 00000000..029133da --- /dev/null +++ b/_sources/_autosummary/gaps.log.print_logging_info.rst.txt @@ -0,0 +1,6 @@ +gaps.log.print\_logging\_info +============================= + +.. currentmodule:: gaps.log + +.. autofunction:: print_logging_info \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.print_logging_info_all_libraries.rst.txt b/_sources/_autosummary/gaps.log.print_logging_info_all_libraries.rst.txt new file mode 100644 index 00000000..4f23ae2c --- /dev/null +++ b/_sources/_autosummary/gaps.log.print_logging_info_all_libraries.rst.txt @@ -0,0 +1,6 @@ +gaps.log.print\_logging\_info\_all\_libraries +============================================= + +.. currentmodule:: gaps.log + +.. 
autofunction:: print_logging_info_all_libraries \ No newline at end of file diff --git a/_sources/_autosummary/gaps.log.rst.txt b/_sources/_autosummary/gaps.log.rst.txt new file mode 100644 index 00000000..9ccc0f77 --- /dev/null +++ b/_sources/_autosummary/gaps.log.rst.txt @@ -0,0 +1,45 @@ +gaps.log +======== + +.. automodule:: gaps.log + + + + .. rubric:: Module attributes + + .. autosummary:: + :toctree: + + FORMAT + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + add_handlers + init_logger + log_mem + log_versions + make_handler + make_log_file_handler + make_log_stream_handler + print_logging_info + print_logging_info_all_libraries + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.pipeline.Pipeline.rst.txt b/_sources/_autosummary/gaps.pipeline.Pipeline.rst.txt new file mode 100644 index 00000000..3439f42f --- /dev/null +++ b/_sources/_autosummary/gaps.pipeline.Pipeline.rst.txt @@ -0,0 +1,33 @@ +gaps.pipeline.Pipeline +====================== + +.. currentmodule:: gaps.pipeline + +.. autoclass:: Pipeline + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Pipeline.cancel_all + ~Pipeline.run + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Pipeline.COMMANDS + ~Pipeline.name + ~Pipeline.status + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.pipeline.parse_previous_status.rst.txt b/_sources/_autosummary/gaps.pipeline.parse_previous_status.rst.txt new file mode 100644 index 00000000..09201591 --- /dev/null +++ b/_sources/_autosummary/gaps.pipeline.parse_previous_status.rst.txt @@ -0,0 +1,6 @@ +gaps.pipeline.parse\_previous\_status +===================================== + +.. currentmodule:: gaps.pipeline + +.. autofunction:: parse_previous_status \ No newline at end of file diff --git a/_sources/_autosummary/gaps.pipeline.rst.txt b/_sources/_autosummary/gaps.pipeline.rst.txt new file mode 100644 index 00000000..98ea2fc3 --- /dev/null +++ b/_sources/_autosummary/gaps.pipeline.rst.txt @@ -0,0 +1,38 @@ +gaps.pipeline +============= + +.. automodule:: gaps.pipeline + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + parse_previous_status + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + Pipeline + + + + + + + + + diff --git a/_sources/_autosummary/gaps.project_points.ProjectPoints.rst.txt b/_sources/_autosummary/gaps.project_points.ProjectPoints.rst.txt new file mode 100644 index 00000000..cfcf936d --- /dev/null +++ b/_sources/_autosummary/gaps.project_points.ProjectPoints.rst.txt @@ -0,0 +1,36 @@ +gaps.project\_points.ProjectPoints +================================== + +.. currentmodule:: gaps.project_points + +.. autoclass:: ProjectPoints + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~ProjectPoints.from_range + ~ProjectPoints.get_sites_from_key + ~ProjectPoints.index + ~ProjectPoints.join_df + ~ProjectPoints.split + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~ProjectPoints.df + ~ProjectPoints.gids + ~ProjectPoints.sites_as_slice + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.project_points.rst.txt b/_sources/_autosummary/gaps.project_points.rst.txt new file mode 100644 index 00000000..d2fc8a72 --- /dev/null +++ b/_sources/_autosummary/gaps.project_points.rst.txt @@ -0,0 +1,31 @@ +gaps.project\_points +==================== + +.. automodule:: gaps.project_points + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + ProjectPoints + + + + + + + + + diff --git a/_sources/_autosummary/gaps.rst.txt b/_sources/_autosummary/gaps.rst.txt new file mode 100644 index 00000000..8ffbf93f --- /dev/null +++ b/_sources/_autosummary/gaps.rst.txt @@ -0,0 +1,43 @@ +gaps +==== + +.. automodule:: gaps + + + + + + + + + + + + + + + + + + + +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: + + gaps.batch + gaps.cli + gaps.collection + gaps.config + gaps.exceptions + gaps.hpc + gaps.legacy + gaps.log + gaps.pipeline + gaps.project_points + gaps.status + gaps.utilities + gaps.version + gaps.warnings + diff --git a/_sources/_autosummary/gaps.status.HardwareOption.rst.txt b/_sources/_autosummary/gaps.status.HardwareOption.rst.txt new file mode 100644 index 00000000..81098f65 --- /dev/null +++ b/_sources/_autosummary/gaps.status.HardwareOption.rst.txt @@ -0,0 +1,33 @@ +gaps.status.HardwareOption +========================== + +.. currentmodule:: gaps.status + +.. autoclass:: HardwareOption + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~HardwareOption.reset_all_cached_queries + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~HardwareOption.LOCAL + ~HardwareOption.KESTREL + ~HardwareOption.EAGLE + ~HardwareOption.PEREGRINE + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.HardwareStatusRetriever.rst.txt b/_sources/_autosummary/gaps.status.HardwareStatusRetriever.rst.txt new file mode 100644 index 00000000..7823c4d4 --- /dev/null +++ b/_sources/_autosummary/gaps.status.HardwareStatusRetriever.rst.txt @@ -0,0 +1,23 @@ +gaps.status.HardwareStatusRetriever +=================================== + +.. currentmodule:: gaps.status + +.. autoclass:: HardwareStatusRetriever + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.QOSOption.rst.txt b/_sources/_autosummary/gaps.status.QOSOption.rst.txt new file mode 100644 index 00000000..021e2b0f --- /dev/null +++ b/_sources/_autosummary/gaps.status.QOSOption.rst.txt @@ -0,0 +1,26 @@ +gaps.status.QOSOption +===================== + +.. currentmodule:: gaps.status + +.. autoclass:: QOSOption + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~QOSOption.NORMAL + ~QOSOption.HIGH + ~QOSOption.UNSPECIFIED + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.Status.rst.txt b/_sources/_autosummary/gaps.status.Status.rst.txt new file mode 100644 index 00000000..1d01967b --- /dev/null +++ b/_sources/_autosummary/gaps.status.Status.rst.txt @@ -0,0 +1,52 @@ +gaps.status.Status +================== + +.. currentmodule:: gaps.status + +.. 
autoclass:: Status + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Status.as_df + ~Status.clear + ~Status.copy + ~Status.dump + ~Status.fromkeys + ~Status.get + ~Status.items + ~Status.job_exists + ~Status.keys + ~Status.make_single_job_file + ~Status.mark_job_as_submitted + ~Status.parse_command_status + ~Status.pop + ~Status.popitem + ~Status.record_monitor_pid + ~Status.reload + ~Status.retrieve_job_status + ~Status.setdefault + ~Status.update + ~Status.update_from_all_job_files + ~Status.update_job_status + ~Status.values + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Status.job_hardware + ~Status.job_ids + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.StatusField.rst.txt b/_sources/_autosummary/gaps.status.StatusField.rst.txt new file mode 100644 index 00000000..6eecd61a --- /dev/null +++ b/_sources/_autosummary/gaps.status.StatusField.rst.txt @@ -0,0 +1,35 @@ +gaps.status.StatusField +======================= + +.. currentmodule:: gaps.status + +.. autoclass:: StatusField + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~StatusField.JOB_ID + ~StatusField.JOB_STATUS + ~StatusField.PIPELINE_INDEX + ~StatusField.HARDWARE + ~StatusField.QOS + ~StatusField.OUT_FILE + ~StatusField.TIME_SUBMITTED + ~StatusField.TIME_START + ~StatusField.TIME_END + ~StatusField.TOTAL_RUNTIME + ~StatusField.RUNTIME_SECONDS + ~StatusField.MONITOR_PID + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.StatusOption.rst.txt b/_sources/_autosummary/gaps.status.StatusOption.rst.txt new file mode 100644 index 00000000..34858e0a --- /dev/null +++ b/_sources/_autosummary/gaps.status.StatusOption.rst.txt @@ -0,0 +1,29 @@ +gaps.status.StatusOption +======================== + +.. currentmodule:: gaps.status + +.. autoclass:: StatusOption + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~StatusOption.NOT_SUBMITTED + ~StatusOption.SUBMITTED + ~StatusOption.RUNNING + ~StatusOption.SUCCESSFUL + ~StatusOption.FAILED + ~StatusOption.COMPLETE + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.StatusUpdates.rst.txt b/_sources/_autosummary/gaps.status.StatusUpdates.rst.txt new file mode 100644 index 00000000..e3941e81 --- /dev/null +++ b/_sources/_autosummary/gaps.status.StatusUpdates.rst.txt @@ -0,0 +1,23 @@ +gaps.status.StatusUpdates +========================= + +.. currentmodule:: gaps.status + +.. autoclass:: StatusUpdates + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.status.rst.txt b/_sources/_autosummary/gaps.status.rst.txt new file mode 100644 index 00000000..3f767d81 --- /dev/null +++ b/_sources/_autosummary/gaps.status.rst.txt @@ -0,0 +1,37 @@ +gaps.status +=========== + +.. automodule:: gaps.status + + + + + + + + + + + + .. rubric:: Classes + + .. 
autosummary:: + :toctree: + :template: custom-class-template.rst + + HardwareOption + HardwareStatusRetriever + QOSOption + Status + StatusField + StatusOption + StatusUpdates + + + + + + + + + diff --git a/_sources/_autosummary/gaps.utilities.CaseInsensitiveEnum.rst.txt b/_sources/_autosummary/gaps.utilities.CaseInsensitiveEnum.rst.txt new file mode 100644 index 00000000..8c03934f --- /dev/null +++ b/_sources/_autosummary/gaps.utilities.CaseInsensitiveEnum.rst.txt @@ -0,0 +1,24 @@ +gaps.utilities.CaseInsensitiveEnum +================================== + +.. currentmodule:: gaps.utilities + +.. autoclass:: CaseInsensitiveEnum + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~CaseInsensitiveEnum.members_as_str + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/gaps.utilities.project_points_from_container_or_slice.rst.txt b/_sources/_autosummary/gaps.utilities.project_points_from_container_or_slice.rst.txt new file mode 100644 index 00000000..775cdeb4 --- /dev/null +++ b/_sources/_autosummary/gaps.utilities.project_points_from_container_or_slice.rst.txt @@ -0,0 +1,6 @@ +gaps.utilities.project\_points\_from\_container\_or\_slice +========================================================== + +.. currentmodule:: gaps.utilities + +.. autofunction:: project_points_from_container_or_slice \ No newline at end of file diff --git a/_sources/_autosummary/gaps.utilities.recursively_update_dict.rst.txt b/_sources/_autosummary/gaps.utilities.recursively_update_dict.rst.txt new file mode 100644 index 00000000..74a86d74 --- /dev/null +++ b/_sources/_autosummary/gaps.utilities.recursively_update_dict.rst.txt @@ -0,0 +1,6 @@ +gaps.utilities.recursively\_update\_dict +======================================== + +.. currentmodule:: gaps.utilities + +.. autofunction:: recursively_update_dict \ No newline at end of file diff --git a/_sources/_autosummary/gaps.utilities.resolve_path.rst.txt b/_sources/_autosummary/gaps.utilities.resolve_path.rst.txt new file mode 100644 index 00000000..aad06813 --- /dev/null +++ b/_sources/_autosummary/gaps.utilities.resolve_path.rst.txt @@ -0,0 +1,6 @@ +gaps.utilities.resolve\_path +============================ + +.. currentmodule:: gaps.utilities + +.. autofunction:: resolve_path \ No newline at end of file diff --git a/_sources/_autosummary/gaps.utilities.rst.txt b/_sources/_autosummary/gaps.utilities.rst.txt new file mode 100644 index 00000000..beaac6a4 --- /dev/null +++ b/_sources/_autosummary/gaps.utilities.rst.txt @@ -0,0 +1,40 @@ +gaps.utilities +============== + +.. automodule:: gaps.utilities + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + project_points_from_container_or_slice + recursively_update_dict + resolve_path + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + CaseInsensitiveEnum + + + + + + + + + diff --git a/_sources/_autosummary/gaps.version.rst.txt b/_sources/_autosummary/gaps.version.rst.txt new file mode 100644 index 00000000..08ca7d4e --- /dev/null +++ b/_sources/_autosummary/gaps.version.rst.txt @@ -0,0 +1,23 @@ +gaps.version +============ + +.. 
automodule:: gaps.version + + + + + + + + + + + + + + + + + + + diff --git a/_sources/_autosummary/gaps.warnings.gapsCollectionWarning.rst.txt b/_sources/_autosummary/gaps.warnings.gapsCollectionWarning.rst.txt new file mode 100644 index 00000000..4569671b --- /dev/null +++ b/_sources/_autosummary/gaps.warnings.gapsCollectionWarning.rst.txt @@ -0,0 +1,6 @@ +gaps.warnings.gapsCollectionWarning +=================================== + +.. currentmodule:: gaps.warnings + +.. autoexception:: gapsCollectionWarning \ No newline at end of file diff --git a/_sources/_autosummary/gaps.warnings.gapsDeprecationWarning.rst.txt b/_sources/_autosummary/gaps.warnings.gapsDeprecationWarning.rst.txt new file mode 100644 index 00000000..0459af46 --- /dev/null +++ b/_sources/_autosummary/gaps.warnings.gapsDeprecationWarning.rst.txt @@ -0,0 +1,6 @@ +gaps.warnings.gapsDeprecationWarning +==================================== + +.. currentmodule:: gaps.warnings + +.. autoexception:: gapsDeprecationWarning \ No newline at end of file diff --git a/_sources/_autosummary/gaps.warnings.gapsHPCWarning.rst.txt b/_sources/_autosummary/gaps.warnings.gapsHPCWarning.rst.txt new file mode 100644 index 00000000..16a3f2a7 --- /dev/null +++ b/_sources/_autosummary/gaps.warnings.gapsHPCWarning.rst.txt @@ -0,0 +1,6 @@ +gaps.warnings.gapsHPCWarning +============================ + +.. currentmodule:: gaps.warnings + +.. autoexception:: gapsHPCWarning \ No newline at end of file diff --git a/_sources/_autosummary/gaps.warnings.gapsWarning.rst.txt b/_sources/_autosummary/gaps.warnings.gapsWarning.rst.txt new file mode 100644 index 00000000..e07a1fb9 --- /dev/null +++ b/_sources/_autosummary/gaps.warnings.gapsWarning.rst.txt @@ -0,0 +1,6 @@ +gaps.warnings.gapsWarning +========================= + +.. currentmodule:: gaps.warnings + +.. autoexception:: gapsWarning \ No newline at end of file diff --git a/_sources/_autosummary/gaps.warnings.rst.txt b/_sources/_autosummary/gaps.warnings.rst.txt new file mode 100644 index 00000000..d2137032 --- /dev/null +++ b/_sources/_autosummary/gaps.warnings.rst.txt @@ -0,0 +1,33 @@ +gaps.warnings +============= + +.. automodule:: gaps.warnings + + + + + + + + + + + + + + + + .. rubric:: Exceptions + + .. autosummary:: + :toctree: + + gapsCollectionWarning + gapsDeprecationWarning + gapsHPCWarning + gapsWarning + + + + + diff --git a/_sources/api.rst.txt b/_sources/api.rst.txt new file mode 100644 index 00000000..dd7c1023 --- /dev/null +++ b/_sources/api.rst.txt @@ -0,0 +1,6 @@ +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + gaps diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt new file mode 100644 index 00000000..a59d5e11 --- /dev/null +++ b/_sources/index.rst.txt @@ -0,0 +1,23 @@ +.. toctree:: + :hidden: + + Home page + Installation and Usage + API reference <_autosummary/gaps> + +GAPs documentation +****************** + +What is GAPs? +============= + +.. include:: ../../README.rst + :start-after: inclusion-intro + :end-before: Installing GAPs + + +Acknowledgments +=============== + +.. include:: ../../README.rst + :start-after: inclusion-ack diff --git a/_sources/misc/installation.rst.txt b/_sources/misc/installation.rst.txt new file mode 100644 index 00000000..4fe7da03 --- /dev/null +++ b/_sources/misc/installation.rst.txt @@ -0,0 +1,6 @@ +Installation +============ + +.. 
include:: ../../../README.rst + :start-after: Installing GAPs + :end-before: Development diff --git a/_sources/misc/installation_usage.rst.txt b/_sources/misc/installation_usage.rst.txt new file mode 100644 index 00000000..96617243 --- /dev/null +++ b/_sources/misc/installation_usage.rst.txt @@ -0,0 +1,5 @@ +Installation and Usage +====================== +.. toctree:: + + installation diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 00000000..e760386b --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 270px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + 
padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, 
+div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol 
p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, 
+span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/check-solid.svg b/_static/check-solid.svg new file mode 100644 index 00000000..92fad4b5 --- /dev/null +++ b/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/_static/clipboard.min.js b/_static/clipboard.min.js new file mode 100644 index 00000000..54b3c463 --- /dev/null +++ b/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! 
+ * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/_static/copybutton.css b/_static/copybutton.css new file mode 100644 index 00000000..f1916ec7 --- /dev/null +++ b/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *
<p class="o-tooltip--left" data-tooltip="Hey">Short</p>
+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/_static/copybutton.js b/_static/copybutton.js new file mode 100644 index 00000000..2ea7ff3e --- /dev/null +++ b/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip 
and icon change, so that we can hide the icon before changing back. +var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos'; + + let text = filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/_static/copybutton_funcs.js b/_static/copybutton_funcs.js new file mode 100644 index 00000000..dbe1aaad --- /dev/null +++ b/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/_static/custom.css b/_static/custom.css new file mode 100644 index 00000000..3674c5a5 --- /dev/null +++ b/_static/custom.css @@ -0,0 +1,44 @@ +/* Override nav bar color */ +/*.wy-side-nav-search { + background-color: #fbfbb6; +} +.wy-side-nav-search > a { + color: #b2355c +}*/ + +/* Override text bar color */ +/*.caption-text { + color: #b2355c; +}*/ + +/* Override code signature colour */ +/*.rst-content dl:not(.docutils) dt { + background: #fbfbb6; + color: #b2355c; + border-top: solid 3px #b2355c; +}*/ + +/* Override hyperlink colour */ +/* a { + color: #b2355c; +}*/ + +/* Make content width wider*/ +.wy-nav-content { + max-width: 60% !important; +} + + +.wy-side-nav-search { + display: block; + width: 300px; + padding: 0.809em; + margin-bottom: 0.809em; + z-index: 200; + background-color: #fcfcfc; + text-align: center; + padding: 0.809em; + display: block; + color: #fcfcfc; + margin-bottom: 0.809em; +} diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 00000000..d06a71d7 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git 
a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 00000000..a495f821 --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '0.4.2', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: true, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/language_data.js b/_static/language_data.js new file mode 100644 index 00000000..250f5665 --- /dev/null +++ b/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/_static/minus.png differ diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 00000000..997797f2 --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,152 @@ +html[data-theme="light"] .highlight pre { line-height: 125%; } +html[data-theme="light"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight td.linenos .special { color: #000000; 
background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight .hll { background-color: #7971292e } +html[data-theme="light"] .highlight { background: #fefefe; color: #545454 } +html[data-theme="light"] .highlight .c { color: #797129 } /* Comment */ +html[data-theme="light"] .highlight .err { color: #d91e18 } /* Error */ +html[data-theme="light"] .highlight .k { color: #7928a1 } /* Keyword */ +html[data-theme="light"] .highlight .l { color: #797129 } /* Literal */ +html[data-theme="light"] .highlight .n { color: #545454 } /* Name */ +html[data-theme="light"] .highlight .o { color: #008000 } /* Operator */ +html[data-theme="light"] .highlight .p { color: #545454 } /* Punctuation */ +html[data-theme="light"] .highlight .ch { color: #797129 } /* Comment.Hashbang */ +html[data-theme="light"] .highlight .cm { color: #797129 } /* Comment.Multiline */ +html[data-theme="light"] .highlight .cp { color: #797129 } /* Comment.Preproc */ +html[data-theme="light"] .highlight .cpf { color: #797129 } /* Comment.PreprocFile */ +html[data-theme="light"] .highlight .c1 { color: #797129 } /* Comment.Single */ +html[data-theme="light"] .highlight .cs { color: #797129 } /* Comment.Special */ +html[data-theme="light"] .highlight .gd { color: #007faa } /* Generic.Deleted */ +html[data-theme="light"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="light"] .highlight .gh { color: #007faa } /* Generic.Heading */ +html[data-theme="light"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="light"] .highlight .gu { color: #007faa } /* Generic.Subheading */ +html[data-theme="light"] .highlight .kc { color: #7928a1 } /* Keyword.Constant */ +html[data-theme="light"] .highlight .kd { color: #7928a1 } /* Keyword.Declaration */ +html[data-theme="light"] .highlight .kn { color: #7928a1 } /* Keyword.Namespace */ +html[data-theme="light"] .highlight .kp { color: #7928a1 } /* Keyword.Pseudo */ +html[data-theme="light"] .highlight .kr { color: #7928a1 } /* Keyword.Reserved */ +html[data-theme="light"] .highlight .kt { color: #797129 } /* Keyword.Type */ +html[data-theme="light"] .highlight .ld { color: #797129 } /* Literal.Date */ +html[data-theme="light"] .highlight .m { color: #797129 } /* Literal.Number */ +html[data-theme="light"] .highlight .s { color: #008000 } /* Literal.String */ +html[data-theme="light"] .highlight .na { color: #797129 } /* Name.Attribute */ +html[data-theme="light"] .highlight .nb { color: #797129 } /* Name.Builtin */ +html[data-theme="light"] .highlight .nc { color: #007faa } /* Name.Class */ +html[data-theme="light"] .highlight .no { color: #007faa } /* Name.Constant */ +html[data-theme="light"] .highlight .nd { color: #797129 } /* Name.Decorator */ +html[data-theme="light"] .highlight .ni { color: #008000 } /* Name.Entity */ +html[data-theme="light"] .highlight .ne { color: #7928a1 } /* Name.Exception */ +html[data-theme="light"] .highlight .nf { color: #007faa } /* Name.Function */ +html[data-theme="light"] .highlight .nl { color: #797129 } /* Name.Label */ +html[data-theme="light"] .highlight .nn { color: #545454 } /* Name.Namespace */ +html[data-theme="light"] .highlight .nx { color: #545454 } /* Name.Other */ +html[data-theme="light"] .highlight .py { color: #007faa } /* Name.Property */ +html[data-theme="light"] .highlight .nt { color: #007faa } /* Name.Tag */ 
+html[data-theme="light"] .highlight .nv { color: #d91e18 } /* Name.Variable */ +html[data-theme="light"] .highlight .ow { color: #7928a1 } /* Operator.Word */ +html[data-theme="light"] .highlight .pm { color: #545454 } /* Punctuation.Marker */ +html[data-theme="light"] .highlight .w { color: #545454 } /* Text.Whitespace */ +html[data-theme="light"] .highlight .mb { color: #797129 } /* Literal.Number.Bin */ +html[data-theme="light"] .highlight .mf { color: #797129 } /* Literal.Number.Float */ +html[data-theme="light"] .highlight .mh { color: #797129 } /* Literal.Number.Hex */ +html[data-theme="light"] .highlight .mi { color: #797129 } /* Literal.Number.Integer */ +html[data-theme="light"] .highlight .mo { color: #797129 } /* Literal.Number.Oct */ +html[data-theme="light"] .highlight .sa { color: #008000 } /* Literal.String.Affix */ +html[data-theme="light"] .highlight .sb { color: #008000 } /* Literal.String.Backtick */ +html[data-theme="light"] .highlight .sc { color: #008000 } /* Literal.String.Char */ +html[data-theme="light"] .highlight .dl { color: #008000 } /* Literal.String.Delimiter */ +html[data-theme="light"] .highlight .sd { color: #008000 } /* Literal.String.Doc */ +html[data-theme="light"] .highlight .s2 { color: #008000 } /* Literal.String.Double */ +html[data-theme="light"] .highlight .se { color: #008000 } /* Literal.String.Escape */ +html[data-theme="light"] .highlight .sh { color: #008000 } /* Literal.String.Heredoc */ +html[data-theme="light"] .highlight .si { color: #008000 } /* Literal.String.Interpol */ +html[data-theme="light"] .highlight .sx { color: #008000 } /* Literal.String.Other */ +html[data-theme="light"] .highlight .sr { color: #d91e18 } /* Literal.String.Regex */ +html[data-theme="light"] .highlight .s1 { color: #008000 } /* Literal.String.Single */ +html[data-theme="light"] .highlight .ss { color: #007faa } /* Literal.String.Symbol */ +html[data-theme="light"] .highlight .bp { color: #797129 } /* Name.Builtin.Pseudo */ +html[data-theme="light"] .highlight .fm { color: #007faa } /* Name.Function.Magic */ +html[data-theme="light"] .highlight .vc { color: #d91e18 } /* Name.Variable.Class */ +html[data-theme="light"] .highlight .vg { color: #d91e18 } /* Name.Variable.Global */ +html[data-theme="light"] .highlight .vi { color: #d91e18 } /* Name.Variable.Instance */ +html[data-theme="light"] .highlight .vm { color: #797129 } /* Name.Variable.Magic */ +html[data-theme="light"] .highlight .il { color: #797129 } /* Literal.Number.Integer.Long */ +html[data-theme="dark"] .highlight pre { line-height: 125%; } +html[data-theme="dark"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight .hll { background-color: #ffd9002e } +html[data-theme="dark"] .highlight { background: #2b2b2b; color: #f8f8f2 } +html[data-theme="dark"] .highlight .c { color: #ffd900 } /* Comment */ +html[data-theme="dark"] .highlight .err { color: #ffa07a } /* Error */ +html[data-theme="dark"] .highlight .k { color: #dcc6e0 } /* Keyword */ +html[data-theme="dark"] .highlight .l { 
color: #ffd900 } /* Literal */ +html[data-theme="dark"] .highlight .n { color: #f8f8f2 } /* Name */ +html[data-theme="dark"] .highlight .o { color: #abe338 } /* Operator */ +html[data-theme="dark"] .highlight .p { color: #f8f8f2 } /* Punctuation */ +html[data-theme="dark"] .highlight .ch { color: #ffd900 } /* Comment.Hashbang */ +html[data-theme="dark"] .highlight .cm { color: #ffd900 } /* Comment.Multiline */ +html[data-theme="dark"] .highlight .cp { color: #ffd900 } /* Comment.Preproc */ +html[data-theme="dark"] .highlight .cpf { color: #ffd900 } /* Comment.PreprocFile */ +html[data-theme="dark"] .highlight .c1 { color: #ffd900 } /* Comment.Single */ +html[data-theme="dark"] .highlight .cs { color: #ffd900 } /* Comment.Special */ +html[data-theme="dark"] .highlight .gd { color: #00e0e0 } /* Generic.Deleted */ +html[data-theme="dark"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="dark"] .highlight .gh { color: #00e0e0 } /* Generic.Heading */ +html[data-theme="dark"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="dark"] .highlight .gu { color: #00e0e0 } /* Generic.Subheading */ +html[data-theme="dark"] .highlight .kc { color: #dcc6e0 } /* Keyword.Constant */ +html[data-theme="dark"] .highlight .kd { color: #dcc6e0 } /* Keyword.Declaration */ +html[data-theme="dark"] .highlight .kn { color: #dcc6e0 } /* Keyword.Namespace */ +html[data-theme="dark"] .highlight .kp { color: #dcc6e0 } /* Keyword.Pseudo */ +html[data-theme="dark"] .highlight .kr { color: #dcc6e0 } /* Keyword.Reserved */ +html[data-theme="dark"] .highlight .kt { color: #ffd900 } /* Keyword.Type */ +html[data-theme="dark"] .highlight .ld { color: #ffd900 } /* Literal.Date */ +html[data-theme="dark"] .highlight .m { color: #ffd900 } /* Literal.Number */ +html[data-theme="dark"] .highlight .s { color: #abe338 } /* Literal.String */ +html[data-theme="dark"] .highlight .na { color: #ffd900 } /* Name.Attribute */ +html[data-theme="dark"] .highlight .nb { color: #ffd900 } /* Name.Builtin */ +html[data-theme="dark"] .highlight .nc { color: #00e0e0 } /* Name.Class */ +html[data-theme="dark"] .highlight .no { color: #00e0e0 } /* Name.Constant */ +html[data-theme="dark"] .highlight .nd { color: #ffd900 } /* Name.Decorator */ +html[data-theme="dark"] .highlight .ni { color: #abe338 } /* Name.Entity */ +html[data-theme="dark"] .highlight .ne { color: #dcc6e0 } /* Name.Exception */ +html[data-theme="dark"] .highlight .nf { color: #00e0e0 } /* Name.Function */ +html[data-theme="dark"] .highlight .nl { color: #ffd900 } /* Name.Label */ +html[data-theme="dark"] .highlight .nn { color: #f8f8f2 } /* Name.Namespace */ +html[data-theme="dark"] .highlight .nx { color: #f8f8f2 } /* Name.Other */ +html[data-theme="dark"] .highlight .py { color: #00e0e0 } /* Name.Property */ +html[data-theme="dark"] .highlight .nt { color: #00e0e0 } /* Name.Tag */ +html[data-theme="dark"] .highlight .nv { color: #ffa07a } /* Name.Variable */ +html[data-theme="dark"] .highlight .ow { color: #dcc6e0 } /* Operator.Word */ +html[data-theme="dark"] .highlight .pm { color: #f8f8f2 } /* Punctuation.Marker */ +html[data-theme="dark"] .highlight .w { color: #f8f8f2 } /* Text.Whitespace */ +html[data-theme="dark"] .highlight .mb { color: #ffd900 } /* Literal.Number.Bin */ +html[data-theme="dark"] .highlight .mf { color: #ffd900 } /* Literal.Number.Float */ +html[data-theme="dark"] .highlight .mh { color: #ffd900 } /* Literal.Number.Hex */ +html[data-theme="dark"] .highlight .mi { color: #ffd900 } /* Literal.Number.Integer 
*/ +html[data-theme="dark"] .highlight .mo { color: #ffd900 } /* Literal.Number.Oct */ +html[data-theme="dark"] .highlight .sa { color: #abe338 } /* Literal.String.Affix */ +html[data-theme="dark"] .highlight .sb { color: #abe338 } /* Literal.String.Backtick */ +html[data-theme="dark"] .highlight .sc { color: #abe338 } /* Literal.String.Char */ +html[data-theme="dark"] .highlight .dl { color: #abe338 } /* Literal.String.Delimiter */ +html[data-theme="dark"] .highlight .sd { color: #abe338 } /* Literal.String.Doc */ +html[data-theme="dark"] .highlight .s2 { color: #abe338 } /* Literal.String.Double */ +html[data-theme="dark"] .highlight .se { color: #abe338 } /* Literal.String.Escape */ +html[data-theme="dark"] .highlight .sh { color: #abe338 } /* Literal.String.Heredoc */ +html[data-theme="dark"] .highlight .si { color: #abe338 } /* Literal.String.Interpol */ +html[data-theme="dark"] .highlight .sx { color: #abe338 } /* Literal.String.Other */ +html[data-theme="dark"] .highlight .sr { color: #ffa07a } /* Literal.String.Regex */ +html[data-theme="dark"] .highlight .s1 { color: #abe338 } /* Literal.String.Single */ +html[data-theme="dark"] .highlight .ss { color: #00e0e0 } /* Literal.String.Symbol */ +html[data-theme="dark"] .highlight .bp { color: #ffd900 } /* Name.Builtin.Pseudo */ +html[data-theme="dark"] .highlight .fm { color: #00e0e0 } /* Name.Function.Magic */ +html[data-theme="dark"] .highlight .vc { color: #ffa07a } /* Name.Variable.Class */ +html[data-theme="dark"] .highlight .vg { color: #ffa07a } /* Name.Variable.Global */ +html[data-theme="dark"] .highlight .vi { color: #ffa07a } /* Name.Variable.Instance */ +html[data-theme="dark"] .highlight .vm { color: #ffd900 } /* Name.Variable.Magic */ +html[data-theme="dark"] .highlight .il { color: #ffd900 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/scripts/bootstrap.js b/_static/scripts/bootstrap.js new file mode 100644 index 00000000..766173ab --- /dev/null +++ b/_static/scripts/bootstrap.js @@ -0,0 +1,3 @@ +/*! 
For license information please see bootstrap.js.LICENSE.txt */ +(()=>{"use strict";var t={d:(e,i)=>{for(var n in i)t.o(i,n)&&!t.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:i[n]})},o:(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},e={};t.r(e),t.d(e,{afterMain:()=>w,afterRead:()=>b,afterWrite:()=>C,applyStyles:()=>$,arrow:()=>G,auto:()=>r,basePlacements:()=>a,beforeMain:()=>v,beforeRead:()=>m,beforeWrite:()=>A,bottom:()=>n,clippingParents:()=>h,computeStyles:()=>et,createPopper:()=>Dt,createPopperBase:()=>Lt,createPopperLite:()=>$t,detectOverflow:()=>mt,end:()=>c,eventListeners:()=>nt,flip:()=>_t,hide:()=>yt,left:()=>o,main:()=>y,modifierPhases:()=>T,offset:()=>wt,placements:()=>g,popper:()=>d,popperGenerator:()=>kt,popperOffsets:()=>At,preventOverflow:()=>Et,read:()=>_,reference:()=>f,right:()=>s,start:()=>l,top:()=>i,variationPlacements:()=>p,viewport:()=>u,write:()=>E});var i="top",n="bottom",s="right",o="left",r="auto",a=[i,n,s,o],l="start",c="end",h="clippingParents",u="viewport",d="popper",f="reference",p=a.reduce((function(t,e){return t.concat([e+"-"+l,e+"-"+c])}),[]),g=[].concat(a,[r]).reduce((function(t,e){return t.concat([e,e+"-"+l,e+"-"+c])}),[]),m="beforeRead",_="read",b="afterRead",v="beforeMain",y="main",w="afterMain",A="beforeWrite",E="write",C="afterWrite",T=[m,_,b,v,y,w,A,E,C];function O(t){return t?(t.nodeName||"").toLowerCase():null}function x(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function k(t){return t instanceof x(t).Element||t instanceof Element}function L(t){return t instanceof x(t).HTMLElement||t instanceof HTMLElement}function D(t){return"undefined"!=typeof ShadowRoot&&(t instanceof x(t).ShadowRoot||t instanceof ShadowRoot)}const $={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];L(s)&&O(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return t[e]="",t}),{});L(n)&&O(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function S(t){return t.split("-")[0]}var I=Math.max,N=Math.min,P=Math.round;function j(){var t=navigator.userAgentData;return null!=t&&t.brands&&Array.isArray(t.brands)?t.brands.map((function(t){return t.brand+"/"+t.version})).join(" "):navigator.userAgent}function M(){return!/^((?!chrome|android).)*safari/i.test(j())}function H(t,e,i){void 0===e&&(e=!1),void 0===i&&(i=!1);var n=t.getBoundingClientRect(),s=1,o=1;e&&L(t)&&(s=t.offsetWidth>0&&P(n.width)/t.offsetWidth||1,o=t.offsetHeight>0&&P(n.height)/t.offsetHeight||1);var 
r=(k(t)?x(t):window).visualViewport,a=!M()&&i,l=(n.left+(a&&r?r.offsetLeft:0))/s,c=(n.top+(a&&r?r.offsetTop:0))/o,h=n.width/s,u=n.height/o;return{width:h,height:u,top:c,right:l+h,bottom:c+u,left:l,x:l,y:c}}function W(t){var e=H(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function F(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&D(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function B(t){return x(t).getComputedStyle(t)}function z(t){return["table","td","th"].indexOf(O(t))>=0}function q(t){return((k(t)?t.ownerDocument:t.document)||window.document).documentElement}function R(t){return"html"===O(t)?t:t.assignedSlot||t.parentNode||(D(t)?t.host:null)||q(t)}function V(t){return L(t)&&"fixed"!==B(t).position?t.offsetParent:null}function K(t){for(var e=x(t),i=V(t);i&&z(i)&&"static"===B(i).position;)i=V(i);return i&&("html"===O(i)||"body"===O(i)&&"static"===B(i).position)?e:i||function(t){var e=/firefox/i.test(j());if(/Trident/i.test(j())&&L(t)&&"fixed"===B(t).position)return null;var i=R(t);for(D(i)&&(i=i.host);L(i)&&["html","body"].indexOf(O(i))<0;){var n=B(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function Q(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}function X(t,e,i){return I(t,N(e,i))}function Y(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function U(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const G={name:"arrow",enabled:!0,phase:"main",fn:function(t){var e,r=t.state,l=t.name,c=t.options,h=r.elements.arrow,u=r.modifiersData.popperOffsets,d=S(r.placement),f=Q(d),p=[o,s].indexOf(d)>=0?"height":"width";if(h&&u){var g=function(t,e){return Y("number"!=typeof(t="function"==typeof t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:U(t,a))}(c.padding,r),m=W(h),_="y"===f?i:o,b="y"===f?n:s,v=r.rects.reference[p]+r.rects.reference[f]-u[f]-r.rects.popper[p],y=u[f]-r.rects.reference[f],w=K(h),A=w?"y"===f?w.clientHeight||0:w.clientWidth||0:0,E=v/2-y/2,C=g[_],T=A-m[p]-g[b],O=A/2-m[p]/2+E,x=X(C,O,T),k=f;r.modifiersData[l]=((e={})[k]=x,e.centerOffset=x-O,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&F(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function J(t){return t.split("-")[1]}var Z={top:"auto",right:"auto",bottom:"auto",left:"auto"};function tt(t){var e,r=t.popper,a=t.popperRect,l=t.placement,h=t.variation,u=t.offsets,d=t.position,f=t.gpuAcceleration,p=t.adaptive,g=t.roundOffsets,m=t.isFixed,_=u.x,b=void 0===_?0:_,v=u.y,y=void 0===v?0:v,w="function"==typeof g?g({x:b,y}):{x:b,y};b=w.x,y=w.y;var A=u.hasOwnProperty("x"),E=u.hasOwnProperty("y"),C=o,T=i,O=window;if(p){var k=K(r),L="clientHeight",D="clientWidth";k===x(r)&&"static"!==B(k=q(r)).position&&"absolute"===d&&(L="scrollHeight",D="scrollWidth"),(l===i||(l===o||l===s)&&h===c)&&(T=n,y-=(m&&k===O&&O.visualViewport?O.visualViewport.height:k[L])-a.height,y*=f?1:-1),l!==o&&(l!==i&&l!==n||h!==c)||(C=s,b-=(m&&k===O&&O.visualViewport?O.visualViewport.width:k[D])-a.width,b*=f?1:-1)}var $,S=Object.assign({position:d},p&&Z),I=!0===g?function(t,e){var 
i=t.x,n=t.y,s=e.devicePixelRatio||1;return{x:P(i*s)/s||0,y:P(n*s)/s||0}}({x:b,y},x(r)):{x:b,y};return b=I.x,y=I.y,f?Object.assign({},S,(($={})[T]=E?"0":"",$[C]=A?"0":"",$.transform=(O.devicePixelRatio||1)<=1?"translate("+b+"px, "+y+"px)":"translate3d("+b+"px, "+y+"px, 0)",$)):Object.assign({},S,((e={})[T]=E?y+"px":"",e[C]=A?b+"px":"",e.transform="",e))}const et={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:S(e.placement),variation:J(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s,isFixed:"fixed"===e.options.strategy};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,tt(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,tt(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var it={passive:!0};const nt={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=x(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,it)})),a&&l.addEventListener("resize",i.update,it),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,it)})),a&&l.removeEventListener("resize",i.update,it)}},data:{}};var st={left:"right",right:"left",bottom:"top",top:"bottom"};function ot(t){return t.replace(/left|right|bottom|top/g,(function(t){return st[t]}))}var rt={start:"end",end:"start"};function at(t){return t.replace(/start|end/g,(function(t){return rt[t]}))}function lt(t){var e=x(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function ct(t){return H(q(t)).left+lt(t).scrollLeft}function ht(t){var e=B(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function ut(t){return["html","body","#document"].indexOf(O(t))>=0?t.ownerDocument.body:L(t)&&ht(t)?t:ut(R(t))}function dt(t,e){var i;void 0===e&&(e=[]);var n=ut(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=x(n),r=s?[o].concat(o.visualViewport||[],ht(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(dt(R(r)))}function ft(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function pt(t,e,i){return e===u?ft(function(t,e){var i=x(t),n=q(t),s=i.visualViewport,o=n.clientWidth,r=n.clientHeight,a=0,l=0;if(s){o=s.width,r=s.height;var c=M();(c||!c&&"fixed"===e)&&(a=s.offsetLeft,l=s.offsetTop)}return{width:o,height:r,x:a+ct(t),y:l}}(t,i)):k(e)?function(t,e){var i=H(t,!1,"fixed"===e);return i.top=i.top+t.clientTop,i.left=i.left+t.clientLeft,i.bottom=i.top+t.clientHeight,i.right=i.left+t.clientWidth,i.width=t.clientWidth,i.height=t.clientHeight,i.x=i.left,i.y=i.top,i}(e,i):ft(function(t){var e,i=q(t),n=lt(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=I(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=I(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+ct(t),l=-n.scrollTop;return"rtl"===B(s||i).direction&&(a+=I(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(q(t)))}function 
gt(t){var e,r=t.reference,a=t.element,h=t.placement,u=h?S(h):null,d=h?J(h):null,f=r.x+r.width/2-a.width/2,p=r.y+r.height/2-a.height/2;switch(u){case i:e={x:f,y:r.y-a.height};break;case n:e={x:f,y:r.y+r.height};break;case s:e={x:r.x+r.width,y:p};break;case o:e={x:r.x-a.width,y:p};break;default:e={x:r.x,y:r.y}}var g=u?Q(u):null;if(null!=g){var m="y"===g?"height":"width";switch(d){case l:e[g]=e[g]-(r[m]/2-a[m]/2);break;case c:e[g]=e[g]+(r[m]/2-a[m]/2)}}return e}function mt(t,e){void 0===e&&(e={});var o=e,r=o.placement,l=void 0===r?t.placement:r,c=o.strategy,p=void 0===c?t.strategy:c,g=o.boundary,m=void 0===g?h:g,_=o.rootBoundary,b=void 0===_?u:_,v=o.elementContext,y=void 0===v?d:v,w=o.altBoundary,A=void 0!==w&&w,E=o.padding,C=void 0===E?0:E,T=Y("number"!=typeof C?C:U(C,a)),x=y===d?f:d,D=t.rects.popper,$=t.elements[A?x:y],S=function(t,e,i,n){var s="clippingParents"===e?function(t){var e=dt(R(t)),i=["absolute","fixed"].indexOf(B(t).position)>=0&&L(t)?K(t):t;return k(i)?e.filter((function(t){return k(t)&&F(t,i)&&"body"!==O(t)})):[]}(t):[].concat(e),o=[].concat(s,[i]),r=o[0],a=o.reduce((function(e,i){var s=pt(t,i,n);return e.top=I(s.top,e.top),e.right=N(s.right,e.right),e.bottom=N(s.bottom,e.bottom),e.left=I(s.left,e.left),e}),pt(t,r,n));return a.width=a.right-a.left,a.height=a.bottom-a.top,a.x=a.left,a.y=a.top,a}(k($)?$:$.contextElement||q(t.elements.popper),m,b,p),P=H(t.elements.reference),j=gt({reference:P,element:D,strategy:"absolute",placement:l}),M=ft(Object.assign({},D,j)),W=y===d?M:P,z={top:S.top-W.top+T.top,bottom:W.bottom-S.bottom+T.bottom,left:S.left-W.left+T.left,right:W.right-S.right+T.right},V=t.modifiersData.offset;if(y===d&&V){var Q=V[l];Object.keys(z).forEach((function(t){var e=[s,n].indexOf(t)>=0?1:-1,o=[i,n].indexOf(t)>=0?"y":"x";z[t]+=Q[o]*e}))}return z}const _t={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,c=t.options,h=t.name;if(!e.modifiersData[h]._skip){for(var u=c.mainAxis,d=void 0===u||u,f=c.altAxis,m=void 0===f||f,_=c.fallbackPlacements,b=c.padding,v=c.boundary,y=c.rootBoundary,w=c.altBoundary,A=c.flipVariations,E=void 0===A||A,C=c.allowedAutoPlacements,T=e.options.placement,O=S(T),x=_||(O!==T&&E?function(t){if(S(t)===r)return[];var e=ot(t);return[at(t),e,at(e)]}(T):[ot(T)]),k=[T].concat(x).reduce((function(t,i){return t.concat(S(i)===r?function(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,l=i.flipVariations,c=i.allowedAutoPlacements,h=void 0===c?g:c,u=J(n),d=u?l?p:p.filter((function(t){return J(t)===u})):a,f=d.filter((function(t){return h.indexOf(t)>=0}));0===f.length&&(f=d);var m=f.reduce((function(e,i){return e[i]=mt(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[S(i)],e}),{});return Object.keys(m).sort((function(t,e){return m[t]-m[e]}))}(e,{placement:i,boundary:v,rootBoundary:y,padding:b,flipVariations:E,allowedAutoPlacements:C}):i)}),[]),L=e.rects.reference,D=e.rects.popper,$=new Map,I=!0,N=k[0],P=0;P=0,F=W?"width":"height",B=mt(e,{placement:j,boundary:v,rootBoundary:y,altBoundary:w,padding:b}),z=W?H?s:o:H?n:i;L[F]>D[F]&&(z=ot(z));var q=ot(z),R=[];if(d&&R.push(B[M]<=0),m&&R.push(B[z]<=0,B[q]<=0),R.every((function(t){return t}))){N=j,I=!1;break}$.set(j,R)}if(I)for(var V=function(t){var e=k.find((function(e){var i=$.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return N=e,"break"},K=E?3:1;K>0&&"break"!==V(K);K--);e.placement!==N&&(e.modifiersData[h]._skip=!0,e.placement=N,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function bt(t,e,i){return void 
0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function vt(t){return[i,s,n,o].some((function(e){return t[e]>=0}))}const yt={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=mt(e,{elementContext:"reference"}),a=mt(e,{altBoundary:!0}),l=bt(r,n),c=bt(a,s,o),h=vt(l),u=vt(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:u},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":u})}},wt={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,n=t.options,r=t.name,a=n.offset,l=void 0===a?[0,0]:a,c=g.reduce((function(t,n){return t[n]=function(t,e,n){var r=S(t),a=[o,i].indexOf(r)>=0?-1:1,l="function"==typeof n?n(Object.assign({},e,{placement:t})):n,c=l[0],h=l[1];return c=c||0,h=(h||0)*a,[o,s].indexOf(r)>=0?{x:h,y:c}:{x:c,y:h}}(n,e.rects,l),t}),{}),h=c[e.placement],u=h.x,d=h.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=u,e.modifiersData.popperOffsets.y+=d),e.modifiersData[r]=c}},At={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=gt({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},Et={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,r=t.options,a=t.name,c=r.mainAxis,h=void 0===c||c,u=r.altAxis,d=void 0!==u&&u,f=r.boundary,p=r.rootBoundary,g=r.altBoundary,m=r.padding,_=r.tether,b=void 0===_||_,v=r.tetherOffset,y=void 0===v?0:v,w=mt(e,{boundary:f,rootBoundary:p,padding:m,altBoundary:g}),A=S(e.placement),E=J(e.placement),C=!E,T=Q(A),O="x"===T?"y":"x",x=e.modifiersData.popperOffsets,k=e.rects.reference,L=e.rects.popper,D="function"==typeof y?y(Object.assign({},e.rects,{placement:e.placement})):y,$="number"==typeof D?{mainAxis:D,altAxis:D}:Object.assign({mainAxis:0,altAxis:0},D),P=e.modifiersData.offset?e.modifiersData.offset[e.placement]:null,j={x:0,y:0};if(x){if(h){var M,H="y"===T?i:o,F="y"===T?n:s,B="y"===T?"height":"width",z=x[T],q=z+w[H],R=z-w[F],V=b?-L[B]/2:0,Y=E===l?k[B]:L[B],U=E===l?-L[B]:-k[B],G=e.elements.arrow,Z=b&&G?W(G):{width:0,height:0},tt=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},et=tt[H],it=tt[F],nt=X(0,k[B],Z[B]),st=C?k[B]/2-V-nt-et-$.mainAxis:Y-nt-et-$.mainAxis,ot=C?-k[B]/2+V+nt+it+$.mainAxis:U+nt+it+$.mainAxis,rt=e.elements.arrow&&K(e.elements.arrow),at=rt?"y"===T?rt.clientTop||0:rt.clientLeft||0:0,lt=null!=(M=null==P?void 0:P[T])?M:0,ct=z+ot-lt,ht=X(b?N(q,z+st-lt-at):q,z,b?I(R,ct):R);x[T]=ht,j[T]=ht-z}if(d){var ut,dt="x"===T?i:o,ft="x"===T?n:s,pt=x[O],gt="y"===O?"height":"width",_t=pt+w[dt],bt=pt-w[ft],vt=-1!==[i,o].indexOf(A),yt=null!=(ut=null==P?void 0:P[O])?ut:0,wt=vt?_t:pt-k[gt]-L[gt]-yt+$.altAxis,At=vt?pt+k[gt]+L[gt]-yt-$.altAxis:bt,Et=b&&vt?function(t,e,i){var n=X(t,e,i);return n>i?i:n}(wt,pt,At):X(b?wt:_t,pt,b?At:bt);x[O]=Et,j[O]=Et-pt}e.modifiersData[a]=j}},requiresIfExists:["offset"]};function Ct(t,e,i){void 0===i&&(i=!1);var n,s,o=L(e),r=L(e)&&function(t){var e=t.getBoundingClientRect(),i=P(e.width)/t.offsetWidth||1,n=P(e.height)/t.offsetHeight||1;return 
1!==i||1!==n}(e),a=q(e),l=H(t,r,i),c={scrollLeft:0,scrollTop:0},h={x:0,y:0};return(o||!o&&!i)&&(("body"!==O(e)||ht(a))&&(c=(n=e)!==x(n)&&L(n)?{scrollLeft:(s=n).scrollLeft,scrollTop:s.scrollTop}:lt(n)),L(e)?((h=H(e,!0)).x+=e.clientLeft,h.y+=e.clientTop):a&&(h.x=ct(a))),{x:l.left+c.scrollLeft-h.x,y:l.top+c.scrollTop-h.y,width:l.width,height:l.height}}function Tt(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var Ot={placement:"bottom",modifiers:[],strategy:"absolute"};function xt(){for(var t=arguments.length,e=new Array(t),i=0;i{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return e},Nt=t=>{const e=It(t);return e&&document.querySelector(e)?e:null},Pt=t=>{const e=It(t);return e?document.querySelector(e):null},jt=t=>{t.dispatchEvent(new Event(St))},Mt=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),Ht=t=>Mt(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(t):null,Wt=t=>{if(!Mt(t)||0===t.getClientRects().length)return!1;const e="visible"===getComputedStyle(t).getPropertyValue("visibility"),i=t.closest("details:not([open])");if(!i)return e;if(i!==t){const e=t.closest("summary");if(e&&e.parentNode!==i)return!1;if(null===e)return!1}return e},Ft=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),Bt=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?Bt(t.parentNode):null},zt=()=>{},qt=t=>{t.offsetHeight},Rt=()=>window.jQuery&&!document.body.hasAttribute("data-bs-no-jquery")?window.jQuery:null,Vt=[],Kt=()=>"rtl"===document.documentElement.dir,Qt=t=>{var e;e=()=>{const e=Rt();if(e){const i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(Vt.length||document.addEventListener("DOMContentLoaded",(()=>{for(const t of Vt)t()})),Vt.push(e)):e()},Xt=t=>{"function"==typeof t&&t()},Yt=(t,e,i=!0)=>{if(!i)return void Xt(t);const n=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(e)+5;let s=!1;const o=({target:i})=>{i===e&&(s=!0,e.removeEventListener(St,o),Xt(t))};e.addEventListener(St,o),setTimeout((()=>{s||jt(e)}),n)},Ut=(t,e,i,n)=>{const s=t.length;let o=t.indexOf(e);return-1===o?!i&&n?t[s-1]:t[0]:(o+=i?1:-1,n&&(o=(o+s)%s),t[Math.max(0,Math.min(o,s-1))])},Gt=/[^.]*(?=\..*)\.|.*/,Jt=/\..*/,Zt=/::\d+$/,te={};let ee=1;const ie={mouseenter:"mouseover",mouseleave:"mouseout"},ne=new 
Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function se(t,e){return e&&`${e}::${ee++}`||t.uidEvent||ee++}function oe(t){const e=se(t);return t.uidEvent=e,te[e]=te[e]||{},te[e]}function re(t,e,i=null){return Object.values(t).find((t=>t.callable===e&&t.delegationSelector===i))}function ae(t,e,i){const n="string"==typeof e,s=n?i:e||i;let o=ue(t);return ne.has(o)||(o=t),[n,s,o]}function le(t,e,i,n,s){if("string"!=typeof e||!t)return;let[o,r,a]=ae(e,i,n);if(e in ie){const t=t=>function(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};r=t(r)}const l=oe(t),c=l[a]||(l[a]={}),h=re(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const u=se(r,e.replace(Gt,"")),d=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(const a of o)if(a===r)return fe(s,{delegateTarget:r}),n.oneOff&&de.off(t,s.type,e,i),i.apply(r,[s])}}(t,i,r):function(t,e){return function i(n){return fe(n,{delegateTarget:t}),i.oneOff&&de.off(t,n.type,e),e.apply(t,[n])}}(t,r);d.delegationSelector=o?i:null,d.callable=r,d.oneOff=s,d.uidEvent=u,c[u]=d,t.addEventListener(a,d,o)}function ce(t,e,i,n,s){const o=re(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function he(t,e,i,n){const s=e[i]||{};for(const o of Object.keys(s))if(o.includes(n)){const n=s[o];ce(t,e,i,n.callable,n.delegationSelector)}}function ue(t){return t=t.replace(Jt,""),ie[t]||t}const de={on(t,e,i,n){le(t,e,i,n,!1)},one(t,e,i,n){le(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=ae(e,i,n),a=r!==e,l=oe(t),c=l[r]||{},h=e.startsWith(".");if(void 0===o){if(h)for(const i of Object.keys(l))he(t,l,i,e.slice(1));for(const i of Object.keys(c)){const n=i.replace(Zt,"");if(!a||e.includes(n)){const e=c[i];ce(t,l,r,e.callable,e.delegationSelector)}}}else{if(!Object.keys(c).length)return;ce(t,l,r,o,s?i:null)}},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=Rt();let s=null,o=!0,r=!0,a=!1;e!==ue(e)&&n&&(s=n.Event(e,i),n(t).trigger(s),o=!s.isPropagationStopped(),r=!s.isImmediatePropagationStopped(),a=s.isDefaultPrevented());let l=new Event(e,{bubbles:o,cancelable:!0});return l=fe(l,i),a&&l.preventDefault(),r&&t.dispatchEvent(l),l.defaultPrevented&&s&&s.preventDefault(),l}};function fe(t,e){for(const[i,n]of Object.entries(e||{}))try{t[i]=n}catch(e){Object.defineProperty(t,i,{configurable:!0,get:()=>n})}return t}const pe=new Map,ge={set(t,e,i){pe.has(t)||pe.set(t,new Map);const n=pe.get(t);n.has(e)||0===n.size?n.set(e,i):console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(n.keys())[0]}.`)},get:(t,e)=>pe.has(t)&&pe.get(t).get(e)||null,remove(t,e){if(!pe.has(t))return;const i=pe.get(t);i.delete(e),0===i.size&&pe.delete(t)}};function me(t){if("true"===t)return!0;if("false"===t)return!1;if(t===Number(t).toString())return Number(t);if(""===t||"null"===t)return null;if("string"!=typeof t)return t;try{return JSON.parse(decodeURIComponent(t))}catch(e){return t}}function _e(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}const be={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${_e(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${_e(e)}`)},getDataAttributes(t){if(!t)return{};const e={},i=Object.keys(t.dataset).filter((t=>t.startsWith("bs")&&!t.startsWith("bsConfig")));for(const n of i){let i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),e[i]=me(t.dataset[n])}return e},getDataAttribute:(t,e)=>me(t.getAttribute(`data-bs-${_e(e)}`))};class ve{static get Default(){return{}}static get DefaultType(){return{}}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}_getConfig(t){return t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t}_mergeConfigObj(t,e){const i=Mt(e)?be.getDataAttribute(e,"config"):{};return{...this.constructor.Default,..."object"==typeof i?i:{},...Mt(e)?be.getDataAttributes(e):{},..."object"==typeof t?t:{}}}_typeCheckConfig(t,e=this.constructor.DefaultType){for(const n of Object.keys(e)){const s=e[n],o=t[n],r=Mt(o)?"element":null==(i=o)?`${i}`:Object.prototype.toString.call(i).match(/\s([a-z]+)/i)[1].toLowerCase();if(!new RegExp(s).test(r))throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option "${n}" provided type "${r}" but expected type "${s}".`)}var i}}class ye extends ve{constructor(t,e){super(),(t=Ht(t))&&(this._element=t,this._config=this._getConfig(e),ge.set(this._element,this.constructor.DATA_KEY,this))}dispose(){ge.remove(this._element,this.constructor.DATA_KEY),de.off(this._element,this.constructor.EVENT_KEY);for(const t of Object.getOwnPropertyNames(this))this[t]=null}_queueCallback(t,e,i=!0){Yt(t,e,i)}_getConfig(t){return t=this._mergeConfigObj(t,this._element),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}static getInstance(t){return ge.get(Ht(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.2.3"}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}static eventName(t){return`${t}${this.EVENT_KEY}`}}const we=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,n=t.NAME;de.on(document,i,`[data-bs-dismiss="${n}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),Ft(this))return;const s=Pt(this)||this.closest(`.${n}`);t.getOrCreateInstance(s)[e]()}))},Ae=".bs.alert",Ee=`close${Ae}`,Ce=`closed${Ae}`;class Te extends ye{static get NAME(){return"alert"}close(){if(de.trigger(this._element,Ee).defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),de.trigger(this._element,Ce),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=Te.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named 
"${t}"`);e[t](this)}}))}}we(Te,"close"),Qt(Te);const Oe='[data-bs-toggle="button"]';class xe extends ye{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=xe.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}de.on(document,"click.bs.button.data-api",Oe,(t=>{t.preventDefault();const e=t.target.closest(Oe);xe.getOrCreateInstance(e).toggle()})),Qt(xe);const ke={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode.closest(e);for(;n;)i.push(n),n=n.parentNode.closest(e);return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(",");return this.find(e,t).filter((t=>!Ft(t)&&Wt(t)))}},Le=".bs.swipe",De=`touchstart${Le}`,$e=`touchmove${Le}`,Se=`touchend${Le}`,Ie=`pointerdown${Le}`,Ne=`pointerup${Le}`,Pe={endCallback:null,leftCallback:null,rightCallback:null},je={endCallback:"(function|null)",leftCallback:"(function|null)",rightCallback:"(function|null)"};class Me extends ve{constructor(t,e){super(),this._element=t,t&&Me.isSupported()&&(this._config=this._getConfig(e),this._deltaX=0,this._supportPointerEvents=Boolean(window.PointerEvent),this._initEvents())}static get Default(){return Pe}static get DefaultType(){return je}static get NAME(){return"swipe"}dispose(){de.off(this._element,Le)}_start(t){this._supportPointerEvents?this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX):this._deltaX=t.touches[0].clientX}_end(t){this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX-this._deltaX),this._handleSwipe(),Xt(this._config.endCallback)}_move(t){this._deltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this._deltaX}_handleSwipe(){const t=Math.abs(this._deltaX);if(t<=40)return;const e=t/this._deltaX;this._deltaX=0,e&&Xt(e>0?this._config.rightCallback:this._config.leftCallback)}_initEvents(){this._supportPointerEvents?(de.on(this._element,Ie,(t=>this._start(t))),de.on(this._element,Ne,(t=>this._end(t))),this._element.classList.add("pointer-event")):(de.on(this._element,De,(t=>this._start(t))),de.on(this._element,$e,(t=>this._move(t))),de.on(this._element,Se,(t=>this._end(t))))}_eventIsPointerPenTouch(t){return this._supportPointerEvents&&("pen"===t.pointerType||"touch"===t.pointerType)}static isSupported(){return"ontouchstart"in document.documentElement||navigator.maxTouchPoints>0}}const He=".bs.carousel",We=".data-api",Fe="next",Be="prev",ze="left",qe="right",Re=`slide${He}`,Ve=`slid${He}`,Ke=`keydown${He}`,Qe=`mouseenter${He}`,Xe=`mouseleave${He}`,Ye=`dragstart${He}`,Ue=`load${He}${We}`,Ge=`click${He}${We}`,Je="carousel",Ze="active",ti=".active",ei=".carousel-item",ii=ti+ei,ni={ArrowLeft:qe,ArrowRight:ze},si={interval:5e3,keyboard:!0,pause:"hover",ride:!1,touch:!0,wrap:!0},oi={interval:"(number|boolean)",keyboard:"boolean",pause:"(string|boolean)",ride:"(boolean|string)",touch:"boolean",wrap:"boolean"};class ri extends 
ye{constructor(t,e){super(t,e),this._interval=null,this._activeElement=null,this._isSliding=!1,this.touchTimeout=null,this._swipeHelper=null,this._indicatorsElement=ke.findOne(".carousel-indicators",this._element),this._addEventListeners(),this._config.ride===Je&&this.cycle()}static get Default(){return si}static get DefaultType(){return oi}static get NAME(){return"carousel"}next(){this._slide(Fe)}nextWhenVisible(){!document.hidden&&Wt(this._element)&&this.next()}prev(){this._slide(Be)}pause(){this._isSliding&&jt(this._element),this._clearInterval()}cycle(){this._clearInterval(),this._updateInterval(),this._interval=setInterval((()=>this.nextWhenVisible()),this._config.interval)}_maybeEnableCycle(){this._config.ride&&(this._isSliding?de.one(this._element,Ve,(()=>this.cycle())):this.cycle())}to(t){const e=this._getItems();if(t>e.length-1||t<0)return;if(this._isSliding)return void de.one(this._element,Ve,(()=>this.to(t)));const i=this._getItemIndex(this._getActive());if(i===t)return;const n=t>i?Fe:Be;this._slide(n,e[t])}dispose(){this._swipeHelper&&this._swipeHelper.dispose(),super.dispose()}_configAfterMerge(t){return t.defaultInterval=t.interval,t}_addEventListeners(){this._config.keyboard&&de.on(this._element,Ke,(t=>this._keydown(t))),"hover"===this._config.pause&&(de.on(this._element,Qe,(()=>this.pause())),de.on(this._element,Xe,(()=>this._maybeEnableCycle()))),this._config.touch&&Me.isSupported()&&this._addTouchEventListeners()}_addTouchEventListeners(){for(const t of ke.find(".carousel-item img",this._element))de.on(t,Ye,(t=>t.preventDefault()));const t={leftCallback:()=>this._slide(this._directionToOrder(ze)),rightCallback:()=>this._slide(this._directionToOrder(qe)),endCallback:()=>{"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((()=>this._maybeEnableCycle()),500+this._config.interval))}};this._swipeHelper=new Me(this._element,t)}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=ni[t.key];e&&(t.preventDefault(),this._slide(this._directionToOrder(e)))}_getItemIndex(t){return this._getItems().indexOf(t)}_setActiveIndicatorElement(t){if(!this._indicatorsElement)return;const e=ke.findOne(ti,this._indicatorsElement);e.classList.remove(Ze),e.removeAttribute("aria-current");const i=ke.findOne(`[data-bs-slide-to="${t}"]`,this._indicatorsElement);i&&(i.classList.add(Ze),i.setAttribute("aria-current","true"))}_updateInterval(){const t=this._activeElement||this._getActive();if(!t)return;const e=Number.parseInt(t.getAttribute("data-bs-interval"),10);this._config.interval=e||this._config.defaultInterval}_slide(t,e=null){if(this._isSliding)return;const i=this._getActive(),n=t===Fe,s=e||Ut(this._getItems(),i,n,this._config.wrap);if(s===i)return;const o=this._getItemIndex(s),r=e=>de.trigger(this._element,e,{relatedTarget:s,direction:this._orderToDirection(t),from:this._getItemIndex(i),to:o});if(r(Re).defaultPrevented)return;if(!i||!s)return;const a=Boolean(this._interval);this.pause(),this._isSliding=!0,this._setActiveIndicatorElement(o),this._activeElement=s;const l=n?"carousel-item-start":"carousel-item-end",c=n?"carousel-item-next":"carousel-item-prev";s.classList.add(c),qt(s),i.classList.add(l),s.classList.add(l),this._queueCallback((()=>{s.classList.remove(l,c),s.classList.add(Ze),i.classList.remove(Ze,c,l),this._isSliding=!1,r(Ve)}),i,this._isAnimated()),a&&this.cycle()}_isAnimated(){return this._element.classList.contains("slide")}_getActive(){return ke.findOne(ii,this._element)}_getItems(){return 
ke.find(ei,this._element)}_clearInterval(){this._interval&&(clearInterval(this._interval),this._interval=null)}_directionToOrder(t){return Kt()?t===ze?Be:Fe:t===ze?Fe:Be}_orderToDirection(t){return Kt()?t===Be?ze:qe:t===Be?qe:ze}static jQueryInterface(t){return this.each((function(){const e=ri.getOrCreateInstance(this,t);if("number"!=typeof t){if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}else e.to(t)}))}}de.on(document,Ge,"[data-bs-slide], [data-bs-slide-to]",(function(t){const e=Pt(this);if(!e||!e.classList.contains(Je))return;t.preventDefault();const i=ri.getOrCreateInstance(e),n=this.getAttribute("data-bs-slide-to");return n?(i.to(n),void i._maybeEnableCycle()):"next"===be.getDataAttribute(this,"slide")?(i.next(),void i._maybeEnableCycle()):(i.prev(),void i._maybeEnableCycle())})),de.on(window,Ue,(()=>{const t=ke.find('[data-bs-ride="carousel"]');for(const e of t)ri.getOrCreateInstance(e)})),Qt(ri);const ai=".bs.collapse",li=`show${ai}`,ci=`shown${ai}`,hi=`hide${ai}`,ui=`hidden${ai}`,di=`click${ai}.data-api`,fi="show",pi="collapse",gi="collapsing",mi=`:scope .${pi} .${pi}`,_i='[data-bs-toggle="collapse"]',bi={parent:null,toggle:!0},vi={parent:"(null|element)",toggle:"boolean"};class yi extends ye{constructor(t,e){super(t,e),this._isTransitioning=!1,this._triggerArray=[];const i=ke.find(_i);for(const t of i){const e=Nt(t),i=ke.find(e).filter((t=>t===this._element));null!==e&&i.length&&this._triggerArray.push(t)}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return bi}static get DefaultType(){return vi}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t=[];if(this._config.parent&&(t=this._getFirstLevelChildren(".collapse.show, .collapse.collapsing").filter((t=>t!==this._element)).map((t=>yi.getOrCreateInstance(t,{toggle:!1})))),t.length&&t[0]._isTransitioning)return;if(de.trigger(this._element,li).defaultPrevented)return;for(const e of t)e.hide();const e=this._getDimension();this._element.classList.remove(pi),this._element.classList.add(gi),this._element.style[e]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const i=`scroll${e[0].toUpperCase()+e.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(gi),this._element.classList.add(pi,fi),this._element.style[e]="",de.trigger(this._element,ci)}),this._element,!0),this._element.style[e]=`${this._element[i]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(de.trigger(this._element,hi).defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,qt(this._element),this._element.classList.add(gi),this._element.classList.remove(pi,fi);for(const t of this._triggerArray){const e=Pt(t);e&&!this._isShown(e)&&this._addAriaAndCollapsedClass([t],!1)}this._isTransitioning=!0,this._element.style[t]="",this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(gi),this._element.classList.add(pi),de.trigger(this._element,ui)}),this._element,!0)}_isShown(t=this._element){return t.classList.contains(fi)}_configAfterMerge(t){return t.toggle=Boolean(t.toggle),t.parent=Ht(t.parent),t}_getDimension(){return 
this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=this._getFirstLevelChildren(_i);for(const e of t){const t=Pt(e);t&&this._addAriaAndCollapsedClass([e],this._isShown(t))}}_getFirstLevelChildren(t){const e=ke.find(mi,this._config.parent);return ke.find(t,this._config.parent).filter((t=>!e.includes(t)))}_addAriaAndCollapsedClass(t,e){if(t.length)for(const i of t)i.classList.toggle("collapsed",!e),i.setAttribute("aria-expanded",e)}static jQueryInterface(t){const e={};return"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1),this.each((function(){const i=yi.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}de.on(document,di,_i,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();const e=Nt(this),i=ke.find(e);for(const t of i)yi.getOrCreateInstance(t,{toggle:!1}).toggle()})),Qt(yi);const wi="dropdown",Ai=".bs.dropdown",Ei=".data-api",Ci="ArrowUp",Ti="ArrowDown",Oi=`hide${Ai}`,xi=`hidden${Ai}`,ki=`show${Ai}`,Li=`shown${Ai}`,Di=`click${Ai}${Ei}`,$i=`keydown${Ai}${Ei}`,Si=`keyup${Ai}${Ei}`,Ii="show",Ni='[data-bs-toggle="dropdown"]:not(.disabled):not(:disabled)',Pi=`${Ni}.${Ii}`,ji=".dropdown-menu",Mi=Kt()?"top-end":"top-start",Hi=Kt()?"top-start":"top-end",Wi=Kt()?"bottom-end":"bottom-start",Fi=Kt()?"bottom-start":"bottom-end",Bi=Kt()?"left-start":"right-start",zi=Kt()?"right-start":"left-start",qi={autoClose:!0,boundary:"clippingParents",display:"dynamic",offset:[0,2],popperConfig:null,reference:"toggle"},Ri={autoClose:"(boolean|string)",boundary:"(string|element)",display:"string",offset:"(array|string|function)",popperConfig:"(null|object|function)",reference:"(string|element|object)"};class Vi extends ye{constructor(t,e){super(t,e),this._popper=null,this._parent=this._element.parentNode,this._menu=ke.next(this._element,ji)[0]||ke.prev(this._element,ji)[0]||ke.findOne(ji,this._parent),this._inNavbar=this._detectNavbar()}static get Default(){return qi}static get DefaultType(){return Ri}static get NAME(){return wi}toggle(){return this._isShown()?this.hide():this.show()}show(){if(Ft(this._element)||this._isShown())return;const t={relatedTarget:this._element};if(!de.trigger(this._element,ki,t).defaultPrevented){if(this._createPopper(),"ontouchstart"in document.documentElement&&!this._parent.closest(".navbar-nav"))for(const t of[].concat(...document.body.children))de.on(t,"mouseover",zt);this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(Ii),this._element.classList.add(Ii),de.trigger(this._element,Li,t)}}hide(){if(Ft(this._element)||!this._isShown())return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){if(!de.trigger(this._element,Oi,t).defaultPrevented){if("ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))de.off(t,"mouseover",zt);this._popper&&this._popper.destroy(),this._menu.classList.remove(Ii),this._element.classList.remove(Ii),this._element.setAttribute("aria-expanded","false"),be.removeDataAttribute(this._menu,"popper"),de.trigger(this._element,xi,t)}}_getConfig(t){if("object"==typeof(t=super._getConfig(t)).reference&&!Mt(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new 
TypeError(`${wi.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(){if(void 0===e)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let t=this._element;"parent"===this._config.reference?t=this._parent:Mt(this._config.reference)?t=Ht(this._config.reference):"object"==typeof this._config.reference&&(t=this._config.reference);const i=this._getPopperConfig();this._popper=Dt(t,this._menu,i)}_isShown(){return this._menu.classList.contains(Ii)}_getPlacement(){const t=this._parent;if(t.classList.contains("dropend"))return Bi;if(t.classList.contains("dropstart"))return zi;if(t.classList.contains("dropup-center"))return"top";if(t.classList.contains("dropdown-center"))return"bottom";const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?Hi:Mi:e?Fi:Wi}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return(this._inNavbar||"static"===this._config.display)&&(be.setDataAttribute(this._menu,"popper","static"),t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_selectMenuItem({key:t,target:e}){const i=ke.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter((t=>Wt(t)));i.length&&Ut(i,e,t===Ti,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=Vi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(2===t.button||"keyup"===t.type&&"Tab"!==t.key)return;const e=ke.find(Pi);for(const i of e){const e=Vi.getInstance(i);if(!e||!1===e._config.autoClose)continue;const n=t.composedPath(),s=n.includes(e._menu);if(n.includes(e._element)||"inside"===e._config.autoClose&&!s||"outside"===e._config.autoClose&&s)continue;if(e._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName)))continue;const o={relatedTarget:e._element};"click"===t.type&&(o.clickEvent=t),e._completeHide(o)}}static dataApiKeydownHandler(t){const e=/input|textarea/i.test(t.target.tagName),i="Escape"===t.key,n=[Ci,Ti].includes(t.key);if(!n&&!i)return;if(e&&!i)return;t.preventDefault();const s=this.matches(Ni)?this:ke.prev(this,Ni)[0]||ke.next(this,Ni)[0]||ke.findOne(Ni,t.delegateTarget.parentNode),o=Vi.getOrCreateInstance(s);if(n)return t.stopPropagation(),o.show(),void o._selectMenuItem(t);o._isShown()&&(t.stopPropagation(),o.hide(),s.focus())}}de.on(document,$i,Ni,Vi.dataApiKeydownHandler),de.on(document,$i,ji,Vi.dataApiKeydownHandler),de.on(document,Di,Vi.clearMenus),de.on(document,Si,Vi.clearMenus),de.on(document,Di,Ni,(function(t){t.preventDefault(),Vi.getOrCreateInstance(this).toggle()})),Qt(Vi);const Ki=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",Qi=".sticky-top",Xi="padding-right",Yi="margin-right";class Ui{constructor(){this._element=document.body}getWidth(){const t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const 
t=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,Xi,(e=>e+t)),this._setElementAttributes(Ki,Xi,(e=>e+t)),this._setElementAttributes(Qi,Yi,(e=>e-t))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,Xi),this._resetElementAttributes(Ki,Xi),this._resetElementAttributes(Qi,Yi)}isOverflowing(){return this.getWidth()>0}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t).getPropertyValue(e);t.style.setProperty(e,`${i(Number.parseFloat(s))}px`)}))}_saveInitialAttribute(t,e){const i=t.style.getPropertyValue(e);i&&be.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=be.getDataAttribute(t,e);null!==i?(be.removeDataAttribute(t,e),t.style.setProperty(e,i)):t.style.removeProperty(e)}))}_applyManipulationCallback(t,e){if(Mt(t))e(t);else for(const i of ke.find(t,this._element))e(i)}}const Gi="backdrop",Ji="show",Zi=`mousedown.bs.${Gi}`,tn={className:"modal-backdrop",clickCallback:null,isAnimated:!1,isVisible:!0,rootElement:"body"},en={className:"string",clickCallback:"(function|null)",isAnimated:"boolean",isVisible:"boolean",rootElement:"(element|string)"};class nn extends ve{constructor(t){super(),this._config=this._getConfig(t),this._isAppended=!1,this._element=null}static get Default(){return tn}static get DefaultType(){return en}static get NAME(){return Gi}show(t){if(!this._config.isVisible)return void Xt(t);this._append();const e=this._getElement();this._config.isAnimated&&qt(e),e.classList.add(Ji),this._emulateAnimation((()=>{Xt(t)}))}hide(t){this._config.isVisible?(this._getElement().classList.remove(Ji),this._emulateAnimation((()=>{this.dispose(),Xt(t)}))):Xt(t)}dispose(){this._isAppended&&(de.off(this._element,Zi),this._element.remove(),this._isAppended=!1)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_configAfterMerge(t){return t.rootElement=Ht(t.rootElement),t}_append(){if(this._isAppended)return;const t=this._getElement();this._config.rootElement.append(t),de.on(t,Zi,(()=>{Xt(this._config.clickCallback)})),this._isAppended=!0}_emulateAnimation(t){Yt(t,this._getElement(),this._config.isAnimated)}}const sn=".bs.focustrap",on=`focusin${sn}`,rn=`keydown.tab${sn}`,an="backward",ln={autofocus:!0,trapElement:null},cn={autofocus:"boolean",trapElement:"element"};class hn extends ve{constructor(t){super(),this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}static get Default(){return ln}static get DefaultType(){return cn}static get NAME(){return"focustrap"}activate(){this._isActive||(this._config.autofocus&&this._config.trapElement.focus(),de.off(document,sn),de.on(document,on,(t=>this._handleFocusin(t))),de.on(document,rn,(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,de.off(document,sn))}_handleFocusin(t){const{trapElement:e}=this._config;if(t.target===document||t.target===e||e.contains(t.target))return;const 
i=ke.focusableChildren(e);0===i.length?e.focus():this._lastTabNavDirection===an?i[i.length-1].focus():i[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?an:"forward")}}const un=".bs.modal",dn=`hide${un}`,fn=`hidePrevented${un}`,pn=`hidden${un}`,gn=`show${un}`,mn=`shown${un}`,_n=`resize${un}`,bn=`click.dismiss${un}`,vn=`mousedown.dismiss${un}`,yn=`keydown.dismiss${un}`,wn=`click${un}.data-api`,An="modal-open",En="show",Cn="modal-static",Tn={backdrop:!0,focus:!0,keyboard:!0},On={backdrop:"(boolean|string)",focus:"boolean",keyboard:"boolean"};class xn extends ye{constructor(t,e){super(t,e),this._dialog=ke.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._isTransitioning=!1,this._scrollBar=new Ui,this._addEventListeners()}static get Default(){return Tn}static get DefaultType(){return On}static get NAME(){return"modal"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||de.trigger(this._element,gn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isTransitioning=!0,this._scrollBar.hide(),document.body.classList.add(An),this._adjustDialog(),this._backdrop.show((()=>this._showElement(t))))}hide(){this._isShown&&!this._isTransitioning&&(de.trigger(this._element,dn).defaultPrevented||(this._isShown=!1,this._isTransitioning=!0,this._focustrap.deactivate(),this._element.classList.remove(En),this._queueCallback((()=>this._hideModal()),this._element,this._isAnimated())))}dispose(){for(const t of[window,this._dialog])de.off(t,un);this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new nn({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new hn({trapElement:this._element})}_showElement(t){document.body.contains(this._element)||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0;const e=ke.findOne(".modal-body",this._dialog);e&&(e.scrollTop=0),qt(this._element),this._element.classList.add(En),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,de.trigger(this._element,mn,{relatedTarget:t})}),this._dialog,this._isAnimated())}_addEventListeners(){de.on(this._element,yn,(t=>{if("Escape"===t.key)return this._config.keyboard?(t.preventDefault(),void this.hide()):void this._triggerBackdropTransition()})),de.on(window,_n,(()=>{this._isShown&&!this._isTransitioning&&this._adjustDialog()})),de.on(this._element,vn,(t=>{de.one(this._element,bn,(e=>{this._element===t.target&&this._element===e.target&&("static"!==this._config.backdrop?this._config.backdrop&&this.hide():this._triggerBackdropTransition())}))}))}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(An),this._resetAdjustments(),this._scrollBar.reset(),de.trigger(this._element,pn)}))}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(de.trigger(this._element,fn).defaultPrevented)return;const 
t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._element.style.overflowY;"hidden"===e||this._element.classList.contains(Cn)||(t||(this._element.style.overflowY="hidden"),this._element.classList.add(Cn),this._queueCallback((()=>{this._element.classList.remove(Cn),this._queueCallback((()=>{this._element.style.overflowY=e}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;if(i&&!t){const t=Kt()?"paddingLeft":"paddingRight";this._element.style[t]=`${e}px`}if(!i&&t){const t=Kt()?"paddingRight":"paddingLeft";this._element.style[t]=`${e}px`}}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=xn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}de.on(document,wn,'[data-bs-toggle="modal"]',(function(t){const e=Pt(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),de.one(e,gn,(t=>{t.defaultPrevented||de.one(e,pn,(()=>{Wt(this)&&this.focus()}))}));const i=ke.findOne(".modal.show");i&&xn.getInstance(i).hide(),xn.getOrCreateInstance(e).toggle(this)})),we(xn),Qt(xn);const kn=".bs.offcanvas",Ln=".data-api",Dn=`load${kn}${Ln}`,$n="show",Sn="showing",In="hiding",Nn=".offcanvas.show",Pn=`show${kn}`,jn=`shown${kn}`,Mn=`hide${kn}`,Hn=`hidePrevented${kn}`,Wn=`hidden${kn}`,Fn=`resize${kn}`,Bn=`click${kn}${Ln}`,zn=`keydown.dismiss${kn}`,qn={backdrop:!0,keyboard:!0,scroll:!1},Rn={backdrop:"(boolean|string)",keyboard:"boolean",scroll:"boolean"};class Vn extends ye{constructor(t,e){super(t,e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get Default(){return qn}static get DefaultType(){return Rn}static get NAME(){return"offcanvas"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||de.trigger(this._element,Pn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._backdrop.show(),this._config.scroll||(new Ui).hide(),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add(Sn),this._queueCallback((()=>{this._config.scroll&&!this._config.backdrop||this._focustrap.activate(),this._element.classList.add($n),this._element.classList.remove(Sn),de.trigger(this._element,jn,{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(de.trigger(this._element,Mn).defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.add(In),this._backdrop.hide(),this._queueCallback((()=>{this._element.classList.remove($n,In),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._config.scroll||(new Ui).reset(),de.trigger(this._element,Wn)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_initializeBackDrop(){const t=Boolean(this._config.backdrop);return new nn({className:"offcanvas-backdrop",isVisible:t,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:t?()=>{"static"!==this._config.backdrop?this.hide():de.trigger(this._element,Hn)}:null})}_initializeFocusTrap(){return new hn({trapElement:this._element})}_addEventListeners(){de.on(this._element,zn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():de.trigger(this._element,Hn))}))}static 
jQueryInterface(t){return this.each((function(){const e=Vn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}de.on(document,Bn,'[data-bs-toggle="offcanvas"]',(function(t){const e=Pt(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),Ft(this))return;de.one(e,Wn,(()=>{Wt(this)&&this.focus()}));const i=ke.findOne(Nn);i&&i!==e&&Vn.getInstance(i).hide(),Vn.getOrCreateInstance(e).toggle(this)})),de.on(window,Dn,(()=>{for(const t of ke.find(Nn))Vn.getOrCreateInstance(t).show()})),de.on(window,Fn,(()=>{for(const t of ke.find("[aria-modal][class*=show][class*=offcanvas-]"))"fixed"!==getComputedStyle(t).position&&Vn.getOrCreateInstance(t).hide()})),we(Vn),Qt(Vn);const Kn=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Qn=/^(?:(?:https?|mailto|ftp|tel|file|sms):|[^#&/:?]*(?:[#/?]|$))/i,Xn=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[\d+/a-z]+=*$/i,Yn=(t,e)=>{const i=t.nodeName.toLowerCase();return e.includes(i)?!Kn.has(i)||Boolean(Qn.test(t.nodeValue)||Xn.test(t.nodeValue)):e.filter((t=>t instanceof RegExp)).some((t=>t.test(i)))},Un={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Gn={allowList:Un,content:{},extraClass:"",html:!1,sanitize:!0,sanitizeFn:null,template:"
"},Jn={allowList:"object",content:"object",extraClass:"(string|function)",html:"boolean",sanitize:"boolean",sanitizeFn:"(null|function)",template:"string"},Zn={entry:"(string|element|function|null)",selector:"(string|element)"};class ts extends ve{constructor(t){super(),this._config=this._getConfig(t)}static get Default(){return Gn}static get DefaultType(){return Jn}static get NAME(){return"TemplateFactory"}getContent(){return Object.values(this._config.content).map((t=>this._resolvePossibleFunction(t))).filter(Boolean)}hasContent(){return this.getContent().length>0}changeContent(t){return this._checkContent(t),this._config.content={...this._config.content,...t},this}toHtml(){const t=document.createElement("div");t.innerHTML=this._maybeSanitize(this._config.template);for(const[e,i]of Object.entries(this._config.content))this._setContent(t,i,e);const e=t.children[0],i=this._resolvePossibleFunction(this._config.extraClass);return i&&e.classList.add(...i.split(" ")),e}_typeCheckConfig(t){super._typeCheckConfig(t),this._checkContent(t.content)}_checkContent(t){for(const[e,i]of Object.entries(t))super._typeCheckConfig({selector:e,entry:i},Zn)}_setContent(t,e,i){const n=ke.findOne(i,t);n&&((e=this._resolvePossibleFunction(e))?Mt(e)?this._putElementInTemplate(Ht(e),n):this._config.html?n.innerHTML=this._maybeSanitize(e):n.textContent=e:n.remove())}_maybeSanitize(t){return this._config.sanitize?function(t,e,i){if(!t.length)return t;if(i&&"function"==typeof i)return i(t);const n=(new window.DOMParser).parseFromString(t,"text/html"),s=[].concat(...n.body.querySelectorAll("*"));for(const t of s){const i=t.nodeName.toLowerCase();if(!Object.keys(e).includes(i)){t.remove();continue}const n=[].concat(...t.attributes),s=[].concat(e["*"]||[],e[i]||[]);for(const e of n)Yn(e,s)||t.removeAttribute(e.nodeName)}return n.body.innerHTML}(t,this._config.allowList,this._config.sanitizeFn):t}_resolvePossibleFunction(t){return"function"==typeof t?t(this):t}_putElementInTemplate(t,e){if(this._config.html)return e.innerHTML="",void e.append(t);e.textContent=t.textContent}}const es=new Set(["sanitize","allowList","sanitizeFn"]),is="fade",ns="show",ss=".modal",os="hide.bs.modal",rs="hover",as="focus",ls={AUTO:"auto",TOP:"top",RIGHT:Kt()?"left":"right",BOTTOM:"bottom",LEFT:Kt()?"right":"left"},cs={allowList:Un,animation:!0,boundary:"clippingParents",container:!1,customClass:"",delay:0,fallbackPlacements:["top","right","bottom","left"],html:!1,offset:[0,0],placement:"top",popperConfig:null,sanitize:!0,sanitizeFn:null,selector:!1,template:'',title:"",trigger:"hover focus"},hs={allowList:"object",animation:"boolean",boundary:"(string|element)",container:"(string|element|boolean)",customClass:"(string|function)",delay:"(number|object)",fallbackPlacements:"array",html:"boolean",offset:"(array|string|function)",placement:"(string|function)",popperConfig:"(null|object|function)",sanitize:"boolean",sanitizeFn:"(null|function)",selector:"(string|boolean)",template:"string",title:"(string|element|function)",trigger:"string"};class us extends ye{constructor(t,i){if(void 0===e)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t,i),this._isEnabled=!0,this._timeout=0,this._isHovered=null,this._activeTrigger={},this._popper=null,this._templateFactory=null,this._newContent=null,this.tip=null,this._setListeners(),this._config.selector||this._fixTitle()}static get Default(){return cs}static get DefaultType(){return hs}static get 
NAME(){return"tooltip"}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(){this._isEnabled&&(this._activeTrigger.click=!this._activeTrigger.click,this._isShown()?this._leave():this._enter())}dispose(){clearTimeout(this._timeout),de.off(this._element.closest(ss),os,this._hideModalHandler),this._element.getAttribute("data-bs-original-title")&&this._element.setAttribute("title",this._element.getAttribute("data-bs-original-title")),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this._isWithContent()||!this._isEnabled)return;const t=de.trigger(this._element,this.constructor.eventName("show")),e=(Bt(this._element)||this._element.ownerDocument.documentElement).contains(this._element);if(t.defaultPrevented||!e)return;this._disposePopper();const i=this._getTipElement();this._element.setAttribute("aria-describedby",i.getAttribute("id"));const{container:n}=this._config;if(this._element.ownerDocument.documentElement.contains(this.tip)||(n.append(i),de.trigger(this._element,this.constructor.eventName("inserted"))),this._popper=this._createPopper(i),i.classList.add(ns),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))de.on(t,"mouseover",zt);this._queueCallback((()=>{de.trigger(this._element,this.constructor.eventName("shown")),!1===this._isHovered&&this._leave(),this._isHovered=!1}),this.tip,this._isAnimated())}hide(){if(this._isShown()&&!de.trigger(this._element,this.constructor.eventName("hide")).defaultPrevented){if(this._getTipElement().classList.remove(ns),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))de.off(t,"mouseover",zt);this._activeTrigger.click=!1,this._activeTrigger[as]=!1,this._activeTrigger[rs]=!1,this._isHovered=null,this._queueCallback((()=>{this._isWithActiveTrigger()||(this._isHovered||this._disposePopper(),this._element.removeAttribute("aria-describedby"),de.trigger(this._element,this.constructor.eventName("hidden")))}),this.tip,this._isAnimated())}}update(){this._popper&&this._popper.update()}_isWithContent(){return Boolean(this._getTitle())}_getTipElement(){return this.tip||(this.tip=this._createTipElement(this._newContent||this._getContentForTemplate())),this.tip}_createTipElement(t){const e=this._getTemplateFactory(t).toHtml();if(!e)return null;e.classList.remove(is,ns),e.classList.add(`bs-${this.constructor.NAME}-auto`);const i=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME).toString();return e.setAttribute("id",i),this._isAnimated()&&e.classList.add(is),e}setContent(t){this._newContent=t,this._isShown()&&(this._disposePopper(),this.show())}_getTemplateFactory(t){return this._templateFactory?this._templateFactory.changeContent(t):this._templateFactory=new ts({...this._config,content:t,extraClass:this._resolvePossibleFunction(this._config.customClass)}),this._templateFactory}_getContentForTemplate(){return{".tooltip-inner":this._getTitle()}}_getTitle(){return this._resolvePossibleFunction(this._config.title)||this._element.getAttribute("data-bs-original-title")}_initializeOnDelegatedTarget(t){return this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_isAnimated(){return this._config.animation||this.tip&&this.tip.classList.contains(is)}_isShown(){return this.tip&&this.tip.classList.contains(ns)}_createPopper(t){const e="function"==typeof 
this._config.placement?this._config.placement.call(this,t,this._element):this._config.placement,i=ls[e.toUpperCase()];return Dt(this._element,t,this._getPopperConfig(i))}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return"function"==typeof t?t.call(this._element):t}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"preSetPlacement",enabled:!0,phase:"beforeMain",fn:t=>{this._getTipElement().setAttribute("data-popper-placement",t.state.placement)}}]};return{...e,..."function"==typeof this._config.popperConfig?this._config.popperConfig(e):this._config.popperConfig}}_setListeners(){const t=this._config.trigger.split(" ");for(const e of t)if("click"===e)de.on(this._element,this.constructor.eventName("click"),this._config.selector,(t=>{this._initializeOnDelegatedTarget(t).toggle()}));else if("manual"!==e){const t=e===rs?this.constructor.eventName("mouseenter"):this.constructor.eventName("focusin"),i=e===rs?this.constructor.eventName("mouseleave"):this.constructor.eventName("focusout");de.on(this._element,t,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusin"===t.type?as:rs]=!0,e._enter()})),de.on(this._element,i,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusout"===t.type?as:rs]=e._element.contains(t.relatedTarget),e._leave()}))}this._hideModalHandler=()=>{this._element&&this.hide()},de.on(this._element.closest(ss),os,this._hideModalHandler)}_fixTitle(){const t=this._element.getAttribute("title");t&&(this._element.getAttribute("aria-label")||this._element.textContent.trim()||this._element.setAttribute("aria-label",t),this._element.setAttribute("data-bs-original-title",t),this._element.removeAttribute("title"))}_enter(){this._isShown()||this._isHovered?this._isHovered=!0:(this._isHovered=!0,this._setTimeout((()=>{this._isHovered&&this.show()}),this._config.delay.show))}_leave(){this._isWithActiveTrigger()||(this._isHovered=!1,this._setTimeout((()=>{this._isHovered||this.hide()}),this._config.delay.hide))}_setTimeout(t,e){clearTimeout(this._timeout),this._timeout=setTimeout(t,e)}_isWithActiveTrigger(){return Object.values(this._activeTrigger).includes(!0)}_getConfig(t){const e=be.getDataAttributes(this._element);for(const t of Object.keys(e))es.has(t)&&delete e[t];return t={...e,..."object"==typeof t&&t?t:{}},t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t.container=!1===t.container?document.body:Ht(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),t}_getDelegateConfig(){const t={};for(const e in this._config)this.constructor.Default[e]!==this._config[e]&&(t[e]=this._config[e]);return t.selector=!1,t.trigger="manual",t}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null),this.tip&&(this.tip.remove(),this.tip=null)}static jQueryInterface(t){return this.each((function(){const e=us.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named 
"${t}"`);e[t]()}}))}}Qt(us);const ds={...us.Default,content:"",offset:[0,8],placement:"right",template:'',trigger:"click"},fs={...us.DefaultType,content:"(null|string|element|function)"};class ps extends us{static get Default(){return ds}static get DefaultType(){return fs}static get NAME(){return"popover"}_isWithContent(){return this._getTitle()||this._getContent()}_getContentForTemplate(){return{".popover-header":this._getTitle(),".popover-body":this._getContent()}}_getContent(){return this._resolvePossibleFunction(this._config.content)}static jQueryInterface(t){return this.each((function(){const e=ps.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(ps);const gs=".bs.scrollspy",ms=`activate${gs}`,_s=`click${gs}`,bs=`load${gs}.data-api`,vs="active",ys="[href]",ws=".nav-link",As=`${ws}, .nav-item > ${ws}, .list-group-item`,Es={offset:null,rootMargin:"0px 0px -25%",smoothScroll:!1,target:null,threshold:[.1,.5,1]},Cs={offset:"(number|null)",rootMargin:"string",smoothScroll:"boolean",target:"element",threshold:"array"};class Ts extends ye{constructor(t,e){super(t,e),this._targetLinks=new Map,this._observableSections=new Map,this._rootElement="visible"===getComputedStyle(this._element).overflowY?null:this._element,this._activeTarget=null,this._observer=null,this._previousScrollData={visibleEntryTop:0,parentScrollTop:0},this.refresh()}static get Default(){return Es}static get DefaultType(){return Cs}static get NAME(){return"scrollspy"}refresh(){this._initializeTargetsAndObservables(),this._maybeEnableSmoothScroll(),this._observer?this._observer.disconnect():this._observer=this._getNewObserver();for(const t of this._observableSections.values())this._observer.observe(t)}dispose(){this._observer.disconnect(),super.dispose()}_configAfterMerge(t){return t.target=Ht(t.target)||document.body,t.rootMargin=t.offset?`${t.offset}px 0px -30%`:t.rootMargin,"string"==typeof t.threshold&&(t.threshold=t.threshold.split(",").map((t=>Number.parseFloat(t)))),t}_maybeEnableSmoothScroll(){this._config.smoothScroll&&(de.off(this._config.target,_s),de.on(this._config.target,_s,ys,(t=>{const e=this._observableSections.get(t.target.hash);if(e){t.preventDefault();const i=this._rootElement||window,n=e.offsetTop-this._element.offsetTop;if(i.scrollTo)return void i.scrollTo({top:n,behavior:"smooth"});i.scrollTop=n}})))}_getNewObserver(){const t={root:this._rootElement,threshold:this._config.threshold,rootMargin:this._config.rootMargin};return new IntersectionObserver((t=>this._observerCallback(t)),t)}_observerCallback(t){const e=t=>this._targetLinks.get(`#${t.target.id}`),i=t=>{this._previousScrollData.visibleEntryTop=t.target.offsetTop,this._process(e(t))},n=(this._rootElement||document.documentElement).scrollTop,s=n>=this._previousScrollData.parentScrollTop;this._previousScrollData.parentScrollTop=n;for(const o of t){if(!o.isIntersecting){this._activeTarget=null,this._clearActiveClass(e(o));continue}const t=o.target.offsetTop>=this._previousScrollData.visibleEntryTop;if(s&&t){if(i(o),!n)return}else s||t||i(o)}}_initializeTargetsAndObservables(){this._targetLinks=new Map,this._observableSections=new Map;const t=ke.find(ys,this._config.target);for(const e of t){if(!e.hash||Ft(e))continue;const 
t=ke.findOne(e.hash,this._element);Wt(t)&&(this._targetLinks.set(e.hash,e),this._observableSections.set(e.hash,t))}}_process(t){this._activeTarget!==t&&(this._clearActiveClass(this._config.target),this._activeTarget=t,t.classList.add(vs),this._activateParents(t),de.trigger(this._element,ms,{relatedTarget:t}))}_activateParents(t){if(t.classList.contains("dropdown-item"))ke.findOne(".dropdown-toggle",t.closest(".dropdown")).classList.add(vs);else for(const e of ke.parents(t,".nav, .list-group"))for(const t of ke.prev(e,As))t.classList.add(vs)}_clearActiveClass(t){t.classList.remove(vs);const e=ke.find(`${ys}.${vs}`,t);for(const t of e)t.classList.remove(vs)}static jQueryInterface(t){return this.each((function(){const e=Ts.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}de.on(window,bs,(()=>{for(const t of ke.find('[data-bs-spy="scroll"]'))Ts.getOrCreateInstance(t)})),Qt(Ts);const Os=".bs.tab",xs=`hide${Os}`,ks=`hidden${Os}`,Ls=`show${Os}`,Ds=`shown${Os}`,$s=`click${Os}`,Ss=`keydown${Os}`,Is=`load${Os}`,Ns="ArrowLeft",Ps="ArrowRight",js="ArrowUp",Ms="ArrowDown",Hs="active",Ws="fade",Fs="show",Bs=":not(.dropdown-toggle)",zs='[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',qs=`.nav-link${Bs}, .list-group-item${Bs}, [role="tab"]${Bs}, ${zs}`,Rs=`.${Hs}[data-bs-toggle="tab"], .${Hs}[data-bs-toggle="pill"], .${Hs}[data-bs-toggle="list"]`;class Vs extends ye{constructor(t){super(t),this._parent=this._element.closest('.list-group, .nav, [role="tablist"]'),this._parent&&(this._setInitialAttributes(this._parent,this._getChildren()),de.on(this._element,Ss,(t=>this._keydown(t))))}static get NAME(){return"tab"}show(){const t=this._element;if(this._elemIsActive(t))return;const e=this._getActiveElem(),i=e?de.trigger(e,xs,{relatedTarget:t}):null;de.trigger(t,Ls,{relatedTarget:e}).defaultPrevented||i&&i.defaultPrevented||(this._deactivate(e,t),this._activate(t,e))}_activate(t,e){t&&(t.classList.add(Hs),this._activate(Pt(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),de.trigger(t,Ds,{relatedTarget:e})):t.classList.add(Fs)}),t,t.classList.contains(Ws)))}_deactivate(t,e){t&&(t.classList.remove(Hs),t.blur(),this._deactivate(Pt(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),de.trigger(t,ks,{relatedTarget:e})):t.classList.remove(Fs)}),t,t.classList.contains(Ws)))}_keydown(t){if(![Ns,Ps,js,Ms].includes(t.key))return;t.stopPropagation(),t.preventDefault();const e=[Ps,Ms].includes(t.key),i=Ut(this._getChildren().filter((t=>!Ft(t))),t.target,e,!0);i&&(i.focus({preventScroll:!0}),Vs.getOrCreateInstance(i).show())}_getChildren(){return ke.find(qs,this._parent)}_getActiveElem(){return this._getChildren().find((t=>this._elemIsActive(t)))||null}_setInitialAttributes(t,e){this._setAttributeIfNotExists(t,"role","tablist");for(const t of e)this._setInitialAttributesOnChild(t)}_setInitialAttributesOnChild(t){t=this._getInnerElement(t);const e=this._elemIsActive(t),i=this._getOuterElement(t);t.setAttribute("aria-selected",e),i!==t&&this._setAttributeIfNotExists(i,"role","presentation"),e||t.setAttribute("tabindex","-1"),this._setAttributeIfNotExists(t,"role","tab"),this._setInitialAttributesOnTargetPanel(t)}_setInitialAttributesOnTargetPanel(t){const 
e=Pt(t);e&&(this._setAttributeIfNotExists(e,"role","tabpanel"),t.id&&this._setAttributeIfNotExists(e,"aria-labelledby",`#${t.id}`))}_toggleDropDown(t,e){const i=this._getOuterElement(t);if(!i.classList.contains("dropdown"))return;const n=(t,n)=>{const s=ke.findOne(t,i);s&&s.classList.toggle(n,e)};n(".dropdown-toggle",Hs),n(".dropdown-menu",Fs),i.setAttribute("aria-expanded",e)}_setAttributeIfNotExists(t,e,i){t.hasAttribute(e)||t.setAttribute(e,i)}_elemIsActive(t){return t.classList.contains(Hs)}_getInnerElement(t){return t.matches(qs)?t:ke.findOne(qs,t)}_getOuterElement(t){return t.closest(".nav-item, .list-group-item")||t}static jQueryInterface(t){return this.each((function(){const e=Vs.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}de.on(document,$s,zs,(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),Ft(this)||Vs.getOrCreateInstance(this).show()})),de.on(window,Is,(()=>{for(const t of ke.find(Rs))Vs.getOrCreateInstance(t)})),Qt(Vs);const Ks=".bs.toast",Qs=`mouseover${Ks}`,Xs=`mouseout${Ks}`,Ys=`focusin${Ks}`,Us=`focusout${Ks}`,Gs=`hide${Ks}`,Js=`hidden${Ks}`,Zs=`show${Ks}`,to=`shown${Ks}`,eo="hide",io="show",no="showing",so={animation:"boolean",autohide:"boolean",delay:"number"},oo={animation:!0,autohide:!0,delay:5e3};class ro extends ye{constructor(t,e){super(t,e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get Default(){return oo}static get DefaultType(){return so}static get NAME(){return"toast"}show(){de.trigger(this._element,Zs).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(eo),qt(this._element),this._element.classList.add(io,no),this._queueCallback((()=>{this._element.classList.remove(no),de.trigger(this._element,to),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this.isShown()&&(de.trigger(this._element,Gs).defaultPrevented||(this._element.classList.add(no),this._queueCallback((()=>{this._element.classList.add(eo),this._element.classList.remove(no,io),de.trigger(this._element,Js)}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this.isShown()&&this._element.classList.remove(io),super.dispose()}isShown(){return this._element.classList.contains(io)}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){de.on(this._element,Qs,(t=>this._onInteraction(t,!0))),de.on(this._element,Xs,(t=>this._onInteraction(t,!1))),de.on(this._element,Ys,(t=>this._onInteraction(t,!0))),de.on(this._element,Us,(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return this.each((function(){const e=ro.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}var ao;we(ro),Qt(ro),ao=function(){[].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]')).map((function(t){return new 
us(t,{delay:{show:500,hide:100}})}))},"loading"!=document.readyState?ao():document.addEventListener("DOMContentLoaded",ao)})(); +//# sourceMappingURL=bootstrap.js.map \ No newline at end of file diff --git a/_static/scripts/bootstrap.js.LICENSE.txt b/_static/scripts/bootstrap.js.LICENSE.txt new file mode 100644 index 00000000..91ad10aa --- /dev/null +++ b/_static/scripts/bootstrap.js.LICENSE.txt @@ -0,0 +1,5 @@ +/*! + * Bootstrap v5.2.3 (https://getbootstrap.com/) + * Copyright 2011-2022 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */ diff --git a/_static/scripts/bootstrap.js.map b/_static/scripts/bootstrap.js.map new file mode 100644 index 00000000..d83e2f7c --- /dev/null +++ b/_static/scripts/bootstrap.js.map @@ -0,0 +1 @@ +{"version":3,"file":"scripts/bootstrap.js","mappings":";mBACA,IAAIA,EAAsB,CCA1BA,EAAwB,CAACC,EAASC,KACjC,IAAI,IAAIC,KAAOD,EACXF,EAAoBI,EAAEF,EAAYC,KAASH,EAAoBI,EAAEH,EAASE,IAC5EE,OAAOC,eAAeL,EAASE,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,IAE1E,ECNDH,EAAwB,CAACS,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,GCClFV,EAAyBC,IACH,oBAAXa,QAA0BA,OAAOC,aAC1CV,OAAOC,eAAeL,EAASa,OAAOC,YAAa,CAAEC,MAAO,WAE7DX,OAAOC,eAAeL,EAAS,aAAc,CAAEe,OAAO,GAAO,ipBCLvD,IAAI,EAAM,MACNC,EAAS,SACTC,EAAQ,QACRC,EAAO,OACPC,EAAO,OACPC,EAAiB,CAAC,EAAKJ,EAAQC,EAAOC,GACtCG,EAAQ,QACRC,EAAM,MACNC,EAAkB,kBAClBC,EAAW,WACXC,EAAS,SACTC,EAAY,YACZC,EAAmCP,EAAeQ,QAAO,SAAUC,EAAKC,GACjF,OAAOD,EAAIE,OAAO,CAACD,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAChE,GAAG,IACQ,EAA0B,GAAGS,OAAOX,EAAgB,CAACD,IAAOS,QAAO,SAAUC,EAAKC,GAC3F,OAAOD,EAAIE,OAAO,CAACD,EAAWA,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAC3E,GAAG,IAEQU,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAc,cACdC,EAAQ,QACRC,EAAa,aACbC,EAAiB,CAACT,EAAYC,EAAMC,EAAWC,EAAYC,EAAMC,EAAWC,EAAaC,EAAOC,GC9B5F,SAASE,EAAYC,GAClC,OAAOA,GAAWA,EAAQC,UAAY,IAAIC,cAAgB,IAC5D,CCFe,SAASC,EAAUC,GAChC,GAAY,MAARA,EACF,OAAOC,OAGT,GAAwB,oBAApBD,EAAKE,WAAkC,CACzC,IAAIC,EAAgBH,EAAKG,cACzB,OAAOA,GAAgBA,EAAcC,aAAwBH,MAC/D,CAEA,OAAOD,CACT,CCTA,SAASK,EAAUL,GAEjB,OAAOA,aADUD,EAAUC,GAAMM,SACIN,aAAgBM,OACvD,CAEA,SAASC,EAAcP,GAErB,OAAOA,aADUD,EAAUC,GAAMQ,aACIR,aAAgBQ,WACvD,CAEA,SAASC,EAAaT,GAEpB,MAA0B,oBAAfU,aAKJV,aADUD,EAAUC,GAAMU,YACIV,aAAgBU,WACvD,CCwDA,SACEC,KAAM,cACNC,SAAS,EACTC,MAAO,QACPC,GA5EF,SAAqBC,GACnB,IAAIC,EAAQD,EAAKC,MACjB3D,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIS,EAAQJ,EAAMK,OAAOV,IAAS,CAAC,EAC/BW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EACxCf,EAAUoB,EAAME,SAASP,GAExBJ,EAAcX,IAAaD,EAAYC,KAO5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUR,GACxC,IAAI3C,EAAQsD,EAAWX,IAET,IAAV3C,EACF4B,EAAQ4B,gBAAgBb,GAExBf,EAAQ6B,aAAad,GAAgB,IAAV3C,EAAiB,GAAKA,EAErD,IACF,GACF,EAoDE0D,OAlDF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MACdY,EAAgB,CAClBlD,OAAQ,CACNmD,SAAUb,EAAMc,QAAQC,SACxB5D,KAAM,IACN6D,IAAK,IACLC,OAAQ,KAEVC,MAAO,CACLL,SAAU,YAEZlD,UAAW,CAAC,GASd,OAPAtB,OAAOkE,OAAOP,EAAME,SAASxC,OAAO0C,MAAOQ,EAAclD,QACzDsC,EAAMK,OAASO,EAEXZ,EAAME,SAASgB,OACjB7E,OAAOkE,OAAOP,EAAME,SAASgB,MAAMd,MAAOQ,EAAcM,OAGnD,WACL7E,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIf,EAAUoB,EAAME,SAASP,GACzBW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EAGxCS,EAFkB/D,OAAO4D,KAAKD,EAAMK,OAAOzD,eAAe+C,GAAQK,EAAMK,OAAOV,GAAQiB,EAAcjB,IAE7E9B,QAAO,SAAUuC,EAAOe,GAElD,OADAf,EAAMe,GAAY,GACXf,CACT,GAAG,CAAC,GAECb,EAAcX,IAAaD,EAAYC,KAI5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUiB,GACxCxC,EAAQ4B,gBAAgBY,EAC1B,IACF,GACF,CACF,EASEC,SAAU,CAAC,kBCjFE,SAASC,EAAiBvD,GACvC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCHO,IAAI,EAAMC,KAAKC,IACX,EAAMD,KAA
KE,IACXC,EAAQH,KAAKG,MCFT,SAASC,IACtB,IAAIC,EAASC,UAAUC,cAEvB,OAAc,MAAVF,GAAkBA,EAAOG,QAAUC,MAAMC,QAAQL,EAAOG,QACnDH,EAAOG,OAAOG,KAAI,SAAUC,GACjC,OAAOA,EAAKC,MAAQ,IAAMD,EAAKE,OACjC,IAAGC,KAAK,KAGHT,UAAUU,SACnB,CCTe,SAASC,IACtB,OAAQ,iCAAiCC,KAAKd,IAChD,CCCe,SAASe,EAAsB/D,EAASgE,EAAcC,QAC9C,IAAjBD,IACFA,GAAe,QAGO,IAApBC,IACFA,GAAkB,GAGpB,IAAIC,EAAalE,EAAQ+D,wBACrBI,EAAS,EACTC,EAAS,EAETJ,GAAgBrD,EAAcX,KAChCmE,EAASnE,EAAQqE,YAAc,GAAItB,EAAMmB,EAAWI,OAAStE,EAAQqE,aAAmB,EACxFD,EAASpE,EAAQuE,aAAe,GAAIxB,EAAMmB,EAAWM,QAAUxE,EAAQuE,cAAoB,GAG7F,IACIE,GADOhE,EAAUT,GAAWG,EAAUH,GAAWK,QAC3BoE,eAEtBC,GAAoBb,KAAsBI,EAC1CU,GAAKT,EAAW3F,MAAQmG,GAAoBD,EAAiBA,EAAeG,WAAa,IAAMT,EAC/FU,GAAKX,EAAW9B,KAAOsC,GAAoBD,EAAiBA,EAAeK,UAAY,IAAMV,EAC7FE,EAAQJ,EAAWI,MAAQH,EAC3BK,EAASN,EAAWM,OAASJ,EACjC,MAAO,CACLE,MAAOA,EACPE,OAAQA,EACRpC,IAAKyC,EACLvG,MAAOqG,EAAIL,EACXjG,OAAQwG,EAAIL,EACZjG,KAAMoG,EACNA,EAAGA,EACHE,EAAGA,EAEP,CCrCe,SAASE,EAAc/E,GACpC,IAAIkE,EAAaH,EAAsB/D,GAGnCsE,EAAQtE,EAAQqE,YAChBG,EAASxE,EAAQuE,aAUrB,OARI3B,KAAKoC,IAAId,EAAWI,MAAQA,IAAU,IACxCA,EAAQJ,EAAWI,OAGjB1B,KAAKoC,IAAId,EAAWM,OAASA,IAAW,IAC1CA,EAASN,EAAWM,QAGf,CACLG,EAAG3E,EAAQ4E,WACXC,EAAG7E,EAAQ8E,UACXR,MAAOA,EACPE,OAAQA,EAEZ,CCvBe,SAASS,EAASC,EAAQC,GACvC,IAAIC,EAAWD,EAAME,aAAeF,EAAME,cAE1C,GAAIH,EAAOD,SAASE,GAClB,OAAO,EAEJ,GAAIC,GAAYvE,EAAauE,GAAW,CACzC,IAAIE,EAAOH,EAEX,EAAG,CACD,GAAIG,GAAQJ,EAAOK,WAAWD,GAC5B,OAAO,EAITA,EAAOA,EAAKE,YAAcF,EAAKG,IACjC,OAASH,EACX,CAGF,OAAO,CACT,CCrBe,SAAS,EAAiBtF,GACvC,OAAOG,EAAUH,GAAS0F,iBAAiB1F,EAC7C,CCFe,SAAS2F,EAAe3F,GACrC,MAAO,CAAC,QAAS,KAAM,MAAM4F,QAAQ7F,EAAYC,KAAa,CAChE,CCFe,SAAS6F,EAAmB7F,GAEzC,QAASS,EAAUT,GAAWA,EAAQO,cACtCP,EAAQ8F,WAAazF,OAAOyF,UAAUC,eACxC,CCFe,SAASC,EAAchG,GACpC,MAA6B,SAAzBD,EAAYC,GACPA,EAMPA,EAAQiG,cACRjG,EAAQwF,aACR3E,EAAab,GAAWA,EAAQyF,KAAO,OAEvCI,EAAmB7F,EAGvB,CCVA,SAASkG,EAAoBlG,GAC3B,OAAKW,EAAcX,IACoB,UAAvC,EAAiBA,GAASiC,SAInBjC,EAAQmG,aAHN,IAIX,CAwCe,SAASC,EAAgBpG,GAItC,IAHA,IAAIK,EAASF,EAAUH,GACnBmG,EAAeD,EAAoBlG,GAEhCmG,GAAgBR,EAAeQ,IAA6D,WAA5C,EAAiBA,GAAclE,UACpFkE,EAAeD,EAAoBC,GAGrC,OAAIA,IAA+C,SAA9BpG,EAAYoG,IAA0D,SAA9BpG,EAAYoG,IAAwE,WAA5C,EAAiBA,GAAclE,UAC3H5B,EAGF8F,GAhDT,SAA4BnG,GAC1B,IAAIqG,EAAY,WAAWvC,KAAKd,KAGhC,GAFW,WAAWc,KAAKd,MAEfrC,EAAcX,IAII,UAFX,EAAiBA,GAEnBiC,SACb,OAAO,KAIX,IAAIqE,EAAcN,EAAchG,GAMhC,IAJIa,EAAayF,KACfA,EAAcA,EAAYb,MAGrB9E,EAAc2F,IAAgB,CAAC,OAAQ,QAAQV,QAAQ7F,EAAYuG,IAAgB,GAAG,CAC3F,IAAIC,EAAM,EAAiBD,GAI3B,GAAsB,SAAlBC,EAAIC,WAA4C,SAApBD,EAAIE,aAA0C,UAAhBF,EAAIG,UAAiF,IAA1D,CAAC,YAAa,eAAed,QAAQW,EAAII,aAAsBN,GAAgC,WAAnBE,EAAII,YAA2BN,GAAaE,EAAIK,QAAyB,SAAfL,EAAIK,OACjO,OAAON,EAEPA,EAAcA,EAAYd,UAE9B,CAEA,OAAO,IACT,CAgByBqB,CAAmB7G,IAAYK,CACxD,CCpEe,SAASyG,EAAyB3H,GAC/C,MAAO,CAAC,MAAO,UAAUyG,QAAQzG,IAAc,EAAI,IAAM,GAC3D,CCDO,SAAS4H,EAAOjE,EAAK1E,EAAOyE,GACjC,OAAO,EAAQC,EAAK,EAAQ1E,EAAOyE,GACrC,CCFe,SAASmE,EAAmBC,GACzC,OAAOxJ,OAAOkE,OAAO,CAAC,ECDf,CACLS,IAAK,EACL9D,MAAO,EACPD,OAAQ,EACRE,KAAM,GDHuC0I,EACjD,CEHe,SAASC,EAAgB9I,EAAOiD,GAC7C,OAAOA,EAAKpC,QAAO,SAAUkI,EAAS5J,GAEpC,OADA4J,EAAQ5J,GAAOa,EACR+I,CACT,GAAG,CAAC,EACN,CCuFA,SACEpG,KAAM,QACNC,SAAS,EACTC,MAAO,OACPC,GA9EF,SAAeC,GACb,IAAIiG,EAEAhG,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KACZmB,EAAUf,EAAKe,QACfmF,EAAejG,EAAME,SAASgB,MAC9BgF,EAAgBlG,EAAMmG,cAAcD,cACpCE,EAAgB9E,EAAiBtB,EAAMjC,WACvCsI,EAAOX,EAAyBU,GAEhCE,EADa,CAACnJ,EAAMD,GAAOsH,QAAQ4B,IAAkB,EAClC,SAAW,QAElC,GAAKH,GAAiBC,EAAtB,CAIA,IAAIL,EAxBgB,SAAyBU,EAASvG,GAItD,OAAO4F,EAAsC,iBAH7CW,EAA6B,mBAAZA,EAAyBA,EAAQlK,OAAOkE,OAAO,CAAC,EAAGP,EAAMwG,MAAO,CAC/EzI,UAAWiC,EAAMjC,aACbwI,GACkDA,EAAUT,EAAgBS,EAASlJ,GAC7F,CAmBsBoJ,CAAgB3F,EAAQyF,QAASvG,GACjD0G,EAAY/C,EA
AcsC,GAC1BU,EAAmB,MAATN,EAAe,EAAMlJ,EAC/ByJ,EAAmB,MAATP,EAAepJ,EAASC,EAClC2J,EAAU7G,EAAMwG,MAAM7I,UAAU2I,GAAOtG,EAAMwG,MAAM7I,UAAU0I,GAAQH,EAAcG,GAAQrG,EAAMwG,MAAM9I,OAAO4I,GAC9GQ,EAAYZ,EAAcG,GAAQrG,EAAMwG,MAAM7I,UAAU0I,GACxDU,EAAoB/B,EAAgBiB,GACpCe,EAAaD,EAA6B,MAATV,EAAeU,EAAkBE,cAAgB,EAAIF,EAAkBG,aAAe,EAAI,EAC3HC,EAAoBN,EAAU,EAAIC,EAAY,EAG9CpF,EAAMmE,EAAcc,GACpBlF,EAAMuF,EAAaN,EAAUJ,GAAOT,EAAce,GAClDQ,EAASJ,EAAa,EAAIN,EAAUJ,GAAO,EAAIa,EAC/CE,EAAS1B,EAAOjE,EAAK0F,EAAQ3F,GAE7B6F,EAAWjB,EACfrG,EAAMmG,cAAcxG,KAASqG,EAAwB,CAAC,GAAyBsB,GAAYD,EAAQrB,EAAsBuB,aAAeF,EAASD,EAAQpB,EAnBzJ,CAoBF,EA4CEtF,OA1CF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MAEdwH,EADU7G,EAAMG,QACWlC,QAC3BqH,OAAoC,IAArBuB,EAA8B,sBAAwBA,EAErD,MAAhBvB,IAKwB,iBAAjBA,IACTA,EAAejG,EAAME,SAASxC,OAAO+J,cAAcxB,MAahDpC,EAAS7D,EAAME,SAASxC,OAAQuI,KAQrCjG,EAAME,SAASgB,MAAQ+E,EACzB,EASE5E,SAAU,CAAC,iBACXqG,iBAAkB,CAAC,oBCnGN,SAASC,EAAa5J,GACnC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCOA,IAAIqG,EAAa,CACf5G,IAAK,OACL9D,MAAO,OACPD,OAAQ,OACRE,KAAM,QAeD,SAAS0K,GAAYlH,GAC1B,IAAImH,EAEApK,EAASiD,EAAMjD,OACfqK,EAAapH,EAAMoH,WACnBhK,EAAY4C,EAAM5C,UAClBiK,EAAYrH,EAAMqH,UAClBC,EAAUtH,EAAMsH,QAChBpH,EAAWF,EAAME,SACjBqH,EAAkBvH,EAAMuH,gBACxBC,EAAWxH,EAAMwH,SACjBC,EAAezH,EAAMyH,aACrBC,EAAU1H,EAAM0H,QAChBC,EAAaL,EAAQ1E,EACrBA,OAAmB,IAAf+E,EAAwB,EAAIA,EAChCC,EAAaN,EAAQxE,EACrBA,OAAmB,IAAf8E,EAAwB,EAAIA,EAEhCC,EAAgC,mBAAjBJ,EAA8BA,EAAa,CAC5D7E,EAAGA,EACHE,IACG,CACHF,EAAGA,EACHE,GAGFF,EAAIiF,EAAMjF,EACVE,EAAI+E,EAAM/E,EACV,IAAIgF,EAAOR,EAAQrL,eAAe,KAC9B8L,EAAOT,EAAQrL,eAAe,KAC9B+L,EAAQxL,EACRyL,EAAQ,EACRC,EAAM5J,OAEV,GAAIkJ,EAAU,CACZ,IAAIpD,EAAeC,EAAgBtH,GAC/BoL,EAAa,eACbC,EAAY,cAEZhE,IAAiBhG,EAAUrB,IAGmB,WAA5C,EAFJqH,EAAeN,EAAmB/G,IAECmD,UAAsC,aAAbA,IAC1DiI,EAAa,eACbC,EAAY,gBAOZhL,IAAc,IAAQA,IAAcZ,GAAQY,IAAcb,IAAU8K,IAAczK,KACpFqL,EAAQ3L,EAGRwG,IAFc4E,GAAWtD,IAAiB8D,GAAOA,EAAIxF,eAAiBwF,EAAIxF,eAAeD,OACzF2B,EAAa+D,IACEf,EAAW3E,OAC1BK,GAAKyE,EAAkB,GAAK,GAG1BnK,IAAcZ,IAASY,IAAc,GAAOA,IAAcd,GAAW+K,IAAczK,KACrFoL,EAAQzL,EAGRqG,IAFc8E,GAAWtD,IAAiB8D,GAAOA,EAAIxF,eAAiBwF,EAAIxF,eAAeH,MACzF6B,EAAagE,IACEhB,EAAW7E,MAC1BK,GAAK2E,EAAkB,GAAK,EAEhC,CAEA,IAgBMc,EAhBFC,EAAe5M,OAAOkE,OAAO,CAC/BM,SAAUA,GACTsH,GAAYP,GAEXsB,GAAyB,IAAjBd,EAlFd,SAA2BrI,EAAM8I,GAC/B,IAAItF,EAAIxD,EAAKwD,EACTE,EAAI1D,EAAK0D,EACT0F,EAAMN,EAAIO,kBAAoB,EAClC,MAAO,CACL7F,EAAG5B,EAAM4B,EAAI4F,GAAOA,GAAO,EAC3B1F,EAAG9B,EAAM8B,EAAI0F,GAAOA,GAAO,EAE/B,CA0EsCE,CAAkB,CACpD9F,EAAGA,EACHE,GACC1E,EAAUrB,IAAW,CACtB6F,EAAGA,EACHE,GAMF,OAHAF,EAAI2F,EAAM3F,EACVE,EAAIyF,EAAMzF,EAENyE,EAGK7L,OAAOkE,OAAO,CAAC,EAAG0I,IAAeD,EAAiB,CAAC,GAAkBJ,GAASF,EAAO,IAAM,GAAIM,EAAeL,GAASF,EAAO,IAAM,GAAIO,EAAe5D,WAAayD,EAAIO,kBAAoB,IAAM,EAAI,aAAe7F,EAAI,OAASE,EAAI,MAAQ,eAAiBF,EAAI,OAASE,EAAI,SAAUuF,IAG5R3M,OAAOkE,OAAO,CAAC,EAAG0I,IAAenB,EAAkB,CAAC,GAAmBc,GAASF,EAAOjF,EAAI,KAAO,GAAIqE,EAAgBa,GAASF,EAAOlF,EAAI,KAAO,GAAIuE,EAAgB1C,UAAY,GAAI0C,GAC9L,CAuDA,UACEnI,KAAM,gBACNC,SAAS,EACTC,MAAO,cACPC,GAzDF,SAAuBwJ,GACrB,IAAItJ,EAAQsJ,EAAMtJ,MACdc,EAAUwI,EAAMxI,QAChByI,EAAwBzI,EAAQoH,gBAChCA,OAA4C,IAA1BqB,GAA0CA,EAC5DC,EAAoB1I,EAAQqH,SAC5BA,OAAiC,IAAtBqB,GAAsCA,EACjDC,EAAwB3I,EAAQsH,aAChCA,OAAyC,IAA1BqB,GAA0CA,EAYzDR,EAAe,CACjBlL,UAAWuD,EAAiBtB,EAAMjC,WAClCiK,UAAWL,EAAa3H,EAAMjC,WAC9BL,OAAQsC,EAAME,SAASxC,OACvBqK,WAAY/H,EAAMwG,MAAM9I,OACxBwK,gBAAiBA,EACjBG,QAAoC,UAA3BrI,EAAMc,QAAQC,UAGgB,MAArCf,EAAMmG,cAAcD,gBACtBlG,EAAMK,OAAO3C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMK,OAAO3C,OAAQmK,GAAYxL,OAAOkE,OAAO,CAAC,EAAG0I,EAAc,CACvGhB,QAASjI,EAAMmG,cAAcD,cAC7BrF,SAAUb,EAAMc,QAAQC,SACxBoH,SAAUA,EACVC,aAAcA,OAIe,MAA7BpI,EAAMmG,cAAcjF,QACtBlB,EAAMK,OAAOa,MAAQ7E,OAAOkE,OAAO,CAA
C,EAAGP,EAAMK,OAAOa,MAAO2G,GAAYxL,OAAOkE,OAAO,CAAC,EAAG0I,EAAc,CACrGhB,QAASjI,EAAMmG,cAAcjF,MAC7BL,SAAU,WACVsH,UAAU,EACVC,aAAcA,OAIlBpI,EAAMM,WAAW5C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMM,WAAW5C,OAAQ,CACnE,wBAAyBsC,EAAMjC,WAEnC,EAQE2L,KAAM,CAAC,GChLT,IAAIC,GAAU,CACZA,SAAS,GAsCX,UACEhK,KAAM,iBACNC,SAAS,EACTC,MAAO,QACPC,GAAI,WAAe,EACnBY,OAxCF,SAAgBX,GACd,IAAIC,EAAQD,EAAKC,MACb4J,EAAW7J,EAAK6J,SAChB9I,EAAUf,EAAKe,QACf+I,EAAkB/I,EAAQgJ,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7CE,EAAkBjJ,EAAQkJ,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7C9K,EAASF,EAAUiB,EAAME,SAASxC,QAClCuM,EAAgB,GAAGjM,OAAOgC,EAAMiK,cAActM,UAAWqC,EAAMiK,cAAcvM,QAYjF,OAVIoM,GACFG,EAAc9J,SAAQ,SAAU+J,GAC9BA,EAAaC,iBAAiB,SAAUP,EAASQ,OAAQT,GAC3D,IAGEK,GACF/K,EAAOkL,iBAAiB,SAAUP,EAASQ,OAAQT,IAG9C,WACDG,GACFG,EAAc9J,SAAQ,SAAU+J,GAC9BA,EAAaG,oBAAoB,SAAUT,EAASQ,OAAQT,GAC9D,IAGEK,GACF/K,EAAOoL,oBAAoB,SAAUT,EAASQ,OAAQT,GAE1D,CACF,EASED,KAAM,CAAC,GC/CT,IAAIY,GAAO,CACTnN,KAAM,QACND,MAAO,OACPD,OAAQ,MACR+D,IAAK,UAEQ,SAASuJ,GAAqBxM,GAC3C,OAAOA,EAAUyM,QAAQ,0BAA0B,SAAUC,GAC3D,OAAOH,GAAKG,EACd,GACF,CCVA,IAAI,GAAO,CACTnN,MAAO,MACPC,IAAK,SAEQ,SAASmN,GAA8B3M,GACpD,OAAOA,EAAUyM,QAAQ,cAAc,SAAUC,GAC/C,OAAO,GAAKA,EACd,GACF,CCPe,SAASE,GAAgB3L,GACtC,IAAI6J,EAAM9J,EAAUC,GAGpB,MAAO,CACL4L,WAHe/B,EAAIgC,YAInBC,UAHcjC,EAAIkC,YAKtB,CCNe,SAASC,GAAoBpM,GAQ1C,OAAO+D,EAAsB8B,EAAmB7F,IAAUzB,KAAOwN,GAAgB/L,GAASgM,UAC5F,CCXe,SAASK,GAAerM,GAErC,IAAIsM,EAAoB,EAAiBtM,GACrCuM,EAAWD,EAAkBC,SAC7BC,EAAYF,EAAkBE,UAC9BC,EAAYH,EAAkBG,UAElC,MAAO,6BAA6B3I,KAAKyI,EAAWE,EAAYD,EAClE,CCLe,SAASE,GAAgBtM,GACtC,MAAI,CAAC,OAAQ,OAAQ,aAAawF,QAAQ7F,EAAYK,KAAU,EAEvDA,EAAKG,cAAcoM,KAGxBhM,EAAcP,IAASiM,GAAejM,GACjCA,EAGFsM,GAAgB1G,EAAc5F,GACvC,CCJe,SAASwM,GAAkB5M,EAAS6M,GACjD,IAAIC,OAES,IAATD,IACFA,EAAO,IAGT,IAAIvB,EAAeoB,GAAgB1M,GAC/B+M,EAASzB,KAAqE,OAAlDwB,EAAwB9M,EAAQO,oBAAyB,EAASuM,EAAsBH,MACpH1C,EAAM9J,EAAUmL,GAChB0B,EAASD,EAAS,CAAC9C,GAAK7K,OAAO6K,EAAIxF,gBAAkB,GAAI4H,GAAef,GAAgBA,EAAe,IAAMA,EAC7G2B,EAAcJ,EAAKzN,OAAO4N,GAC9B,OAAOD,EAASE,EAChBA,EAAY7N,OAAOwN,GAAkB5G,EAAcgH,IACrD,CCzBe,SAASE,GAAiBC,GACvC,OAAO1P,OAAOkE,OAAO,CAAC,EAAGwL,EAAM,CAC7B5O,KAAM4O,EAAKxI,EACXvC,IAAK+K,EAAKtI,EACVvG,MAAO6O,EAAKxI,EAAIwI,EAAK7I,MACrBjG,OAAQ8O,EAAKtI,EAAIsI,EAAK3I,QAE1B,CCqBA,SAAS4I,GAA2BpN,EAASqN,EAAgBlL,GAC3D,OAAOkL,IAAmBxO,EAAWqO,GCzBxB,SAAyBlN,EAASmC,GAC/C,IAAI8H,EAAM9J,EAAUH,GAChBsN,EAAOzH,EAAmB7F,GAC1ByE,EAAiBwF,EAAIxF,eACrBH,EAAQgJ,EAAKhF,YACb9D,EAAS8I,EAAKjF,aACd1D,EAAI,EACJE,EAAI,EAER,GAAIJ,EAAgB,CAClBH,EAAQG,EAAeH,MACvBE,EAASC,EAAeD,OACxB,IAAI+I,EAAiB1J,KAEjB0J,IAAmBA,GAA+B,UAAbpL,KACvCwC,EAAIF,EAAeG,WACnBC,EAAIJ,EAAeK,UAEvB,CAEA,MAAO,CACLR,MAAOA,EACPE,OAAQA,EACRG,EAAGA,EAAIyH,GAAoBpM,GAC3B6E,EAAGA,EAEP,CDDwD2I,CAAgBxN,EAASmC,IAAa1B,EAAU4M,GAdxG,SAAoCrN,EAASmC,GAC3C,IAAIgL,EAAOpJ,EAAsB/D,GAAS,EAAoB,UAAbmC,GASjD,OARAgL,EAAK/K,IAAM+K,EAAK/K,IAAMpC,EAAQyN,UAC9BN,EAAK5O,KAAO4O,EAAK5O,KAAOyB,EAAQ0N,WAChCP,EAAK9O,OAAS8O,EAAK/K,IAAMpC,EAAQqI,aACjC8E,EAAK7O,MAAQ6O,EAAK5O,KAAOyB,EAAQsI,YACjC6E,EAAK7I,MAAQtE,EAAQsI,YACrB6E,EAAK3I,OAASxE,EAAQqI,aACtB8E,EAAKxI,EAAIwI,EAAK5O,KACd4O,EAAKtI,EAAIsI,EAAK/K,IACP+K,CACT,CAG0HQ,CAA2BN,EAAgBlL,GAAY+K,GEtBlK,SAAyBlN,GACtC,IAAI8M,EAEAQ,EAAOzH,EAAmB7F,GAC1B4N,EAAY7B,GAAgB/L,GAC5B2M,EAA0D,OAAlDG,EAAwB9M,EAAQO,oBAAyB,EAASuM,EAAsBH,KAChGrI,EAAQ,EAAIgJ,EAAKO,YAAaP,EAAKhF,YAAaqE,EAAOA,EAAKkB,YAAc,EAAGlB,EAAOA,EAAKrE,YAAc,GACvG9D,EAAS,EAAI8I,EAAKQ,aAAcR,EAAKjF,aAAcsE,EAAOA,EAAKmB,aAAe,EAAGnB,EAAOA,EAAKtE,aAAe,GAC5G1D,GAAKiJ,EAAU5B,WAAaI,GAAoBpM,GAChD6E,GAAK+I,EAAU1B,UAMnB,MAJiD,QAA7C,EAAiBS,GAAQW,GAAMS,YACjCpJ,GAAK,EAAI2I,EAAKhF,YAAaqE,EAAOA,EAAKrE,YAAc,GAAKhE,GAGrD,CACLA,MAAOA,EACPE,OAAQA,EACRG
,EAAGA,EACHE,EAAGA,EAEP,CFCkMmJ,CAAgBnI,EAAmB7F,IACrO,CG1Be,SAASiO,GAAe9M,GACrC,IAOIkI,EAPAtK,EAAYoC,EAAKpC,UACjBiB,EAAUmB,EAAKnB,QACfb,EAAYgC,EAAKhC,UACjBqI,EAAgBrI,EAAYuD,EAAiBvD,GAAa,KAC1DiK,EAAYjK,EAAY4J,EAAa5J,GAAa,KAClD+O,EAAUnP,EAAU4F,EAAI5F,EAAUuF,MAAQ,EAAItE,EAAQsE,MAAQ,EAC9D6J,EAAUpP,EAAU8F,EAAI9F,EAAUyF,OAAS,EAAIxE,EAAQwE,OAAS,EAGpE,OAAQgD,GACN,KAAK,EACH6B,EAAU,CACR1E,EAAGuJ,EACHrJ,EAAG9F,EAAU8F,EAAI7E,EAAQwE,QAE3B,MAEF,KAAKnG,EACHgL,EAAU,CACR1E,EAAGuJ,EACHrJ,EAAG9F,EAAU8F,EAAI9F,EAAUyF,QAE7B,MAEF,KAAKlG,EACH+K,EAAU,CACR1E,EAAG5F,EAAU4F,EAAI5F,EAAUuF,MAC3BO,EAAGsJ,GAEL,MAEF,KAAK5P,EACH8K,EAAU,CACR1E,EAAG5F,EAAU4F,EAAI3E,EAAQsE,MACzBO,EAAGsJ,GAEL,MAEF,QACE9E,EAAU,CACR1E,EAAG5F,EAAU4F,EACbE,EAAG9F,EAAU8F,GAInB,IAAIuJ,EAAW5G,EAAgBV,EAAyBU,GAAiB,KAEzE,GAAgB,MAAZ4G,EAAkB,CACpB,IAAI1G,EAAmB,MAAb0G,EAAmB,SAAW,QAExC,OAAQhF,GACN,KAAK1K,EACH2K,EAAQ+E,GAAY/E,EAAQ+E,IAAarP,EAAU2I,GAAO,EAAI1H,EAAQ0H,GAAO,GAC7E,MAEF,KAAK/I,EACH0K,EAAQ+E,GAAY/E,EAAQ+E,IAAarP,EAAU2I,GAAO,EAAI1H,EAAQ0H,GAAO,GAKnF,CAEA,OAAO2B,CACT,CC3De,SAASgF,GAAejN,EAAOc,QAC5B,IAAZA,IACFA,EAAU,CAAC,GAGb,IAAIoM,EAAWpM,EACXqM,EAAqBD,EAASnP,UAC9BA,OAAmC,IAAvBoP,EAAgCnN,EAAMjC,UAAYoP,EAC9DC,EAAoBF,EAASnM,SAC7BA,OAAiC,IAAtBqM,EAA+BpN,EAAMe,SAAWqM,EAC3DC,EAAoBH,EAASI,SAC7BA,OAAiC,IAAtBD,EAA+B7P,EAAkB6P,EAC5DE,EAAwBL,EAASM,aACjCA,OAAyC,IAA1BD,EAAmC9P,EAAW8P,EAC7DE,EAAwBP,EAASQ,eACjCA,OAA2C,IAA1BD,EAAmC/P,EAAS+P,EAC7DE,EAAuBT,EAASU,YAChCA,OAAuC,IAAzBD,GAA0CA,EACxDE,EAAmBX,EAAS3G,QAC5BA,OAA+B,IAArBsH,EAA8B,EAAIA,EAC5ChI,EAAgBD,EAAsC,iBAAZW,EAAuBA,EAAUT,EAAgBS,EAASlJ,IACpGyQ,EAAaJ,IAAmBhQ,EAASC,EAAYD,EACrDqK,EAAa/H,EAAMwG,MAAM9I,OACzBkB,EAAUoB,EAAME,SAAS0N,EAAcE,EAAaJ,GACpDK,EJkBS,SAAyBnP,EAAS0O,EAAUE,EAAczM,GACvE,IAAIiN,EAAmC,oBAAbV,EAlB5B,SAA4B1O,GAC1B,IAAIpB,EAAkBgO,GAAkB5G,EAAchG,IAElDqP,EADoB,CAAC,WAAY,SAASzJ,QAAQ,EAAiB5F,GAASiC,WAAa,GACnDtB,EAAcX,GAAWoG,EAAgBpG,GAAWA,EAE9F,OAAKS,EAAU4O,GAKRzQ,EAAgBgI,QAAO,SAAUyG,GACtC,OAAO5M,EAAU4M,IAAmBpI,EAASoI,EAAgBgC,IAAmD,SAAhCtP,EAAYsN,EAC9F,IANS,EAOX,CAK6DiC,CAAmBtP,GAAW,GAAGZ,OAAOsP,GAC/F9P,EAAkB,GAAGQ,OAAOgQ,EAAqB,CAACR,IAClDW,EAAsB3Q,EAAgB,GACtC4Q,EAAe5Q,EAAgBK,QAAO,SAAUwQ,EAASpC,GAC3D,IAAIF,EAAOC,GAA2BpN,EAASqN,EAAgBlL,GAK/D,OAJAsN,EAAQrN,IAAM,EAAI+K,EAAK/K,IAAKqN,EAAQrN,KACpCqN,EAAQnR,MAAQ,EAAI6O,EAAK7O,MAAOmR,EAAQnR,OACxCmR,EAAQpR,OAAS,EAAI8O,EAAK9O,OAAQoR,EAAQpR,QAC1CoR,EAAQlR,KAAO,EAAI4O,EAAK5O,KAAMkR,EAAQlR,MAC/BkR,CACT,GAAGrC,GAA2BpN,EAASuP,EAAqBpN,IAK5D,OAJAqN,EAAalL,MAAQkL,EAAalR,MAAQkR,EAAajR,KACvDiR,EAAahL,OAASgL,EAAanR,OAASmR,EAAapN,IACzDoN,EAAa7K,EAAI6K,EAAajR,KAC9BiR,EAAa3K,EAAI2K,EAAapN,IACvBoN,CACT,CInC2BE,CAAgBjP,EAAUT,GAAWA,EAAUA,EAAQ2P,gBAAkB9J,EAAmBzE,EAAME,SAASxC,QAAS4P,EAAUE,EAAczM,GACjKyN,EAAsB7L,EAAsB3C,EAAME,SAASvC,WAC3DuI,EAAgB2G,GAAe,CACjClP,UAAW6Q,EACX5P,QAASmJ,EACThH,SAAU,WACVhD,UAAWA,IAET0Q,EAAmB3C,GAAiBzP,OAAOkE,OAAO,CAAC,EAAGwH,EAAY7B,IAClEwI,EAAoBhB,IAAmBhQ,EAAS+Q,EAAmBD,EAGnEG,EAAkB,CACpB3N,IAAK+M,EAAmB/M,IAAM0N,EAAkB1N,IAAM6E,EAAc7E,IACpE/D,OAAQyR,EAAkBzR,OAAS8Q,EAAmB9Q,OAAS4I,EAAc5I,OAC7EE,KAAM4Q,EAAmB5Q,KAAOuR,EAAkBvR,KAAO0I,EAAc1I,KACvED,MAAOwR,EAAkBxR,MAAQ6Q,EAAmB7Q,MAAQ2I,EAAc3I,OAExE0R,EAAa5O,EAAMmG,cAAckB,OAErC,GAAIqG,IAAmBhQ,GAAUkR,EAAY,CAC3C,IAAIvH,EAASuH,EAAW7Q,GACxB1B,OAAO4D,KAAK0O,GAAiBxO,SAAQ,SAAUhE,GAC7C,IAAI0S,EAAW,CAAC3R,EAAOD,GAAQuH,QAAQrI,IAAQ,EAAI,GAAK,EACpDkK,EAAO,CAAC,EAAKpJ,GAAQuH,QAAQrI,IAAQ,EAAI,IAAM,IACnDwS,EAAgBxS,IAAQkL,EAAOhB,GAAQwI,CACzC,GACF,CAEA,OAAOF,CACT,CCyEA,UACEhP,KAAM,OACNC,SAAS,EACTC,MAAO,OACPC,GA5HF,SAAcC,GACZ,IAAIC,EAAQD,EAAKC,MACbc,EAAUf,EAAKe,QACfnB,EAAOI,EAAKJ,KAEhB,IAAIK,EAAMmG,cAAcxG,GAAMmP,MAA9B,CAoCA,IAhCA,IA
AIC,EAAoBjO,EAAQkM,SAC5BgC,OAAsC,IAAtBD,GAAsCA,EACtDE,EAAmBnO,EAAQoO,QAC3BC,OAAoC,IAArBF,GAAqCA,EACpDG,EAA8BtO,EAAQuO,mBACtC9I,EAAUzF,EAAQyF,QAClB+G,EAAWxM,EAAQwM,SACnBE,EAAe1M,EAAQ0M,aACvBI,EAAc9M,EAAQ8M,YACtB0B,EAAwBxO,EAAQyO,eAChCA,OAA2C,IAA1BD,GAA0CA,EAC3DE,EAAwB1O,EAAQ0O,sBAChCC,EAAqBzP,EAAMc,QAAQ/C,UACnCqI,EAAgB9E,EAAiBmO,GAEjCJ,EAAqBD,IADHhJ,IAAkBqJ,GACqCF,EAjC/E,SAAuCxR,GACrC,GAAIuD,EAAiBvD,KAAeX,EAClC,MAAO,GAGT,IAAIsS,EAAoBnF,GAAqBxM,GAC7C,MAAO,CAAC2M,GAA8B3M,GAAY2R,EAAmBhF,GAA8BgF,GACrG,CA0B6IC,CAA8BF,GAA3E,CAAClF,GAAqBkF,KAChHG,EAAa,CAACH,GAAoBzR,OAAOqR,GAAoBxR,QAAO,SAAUC,EAAKC,GACrF,OAAOD,EAAIE,OAAOsD,EAAiBvD,KAAeX,ECvCvC,SAA8B4C,EAAOc,QAClC,IAAZA,IACFA,EAAU,CAAC,GAGb,IAAIoM,EAAWpM,EACX/C,EAAYmP,EAASnP,UACrBuP,EAAWJ,EAASI,SACpBE,EAAeN,EAASM,aACxBjH,EAAU2G,EAAS3G,QACnBgJ,EAAiBrC,EAASqC,eAC1BM,EAAwB3C,EAASsC,sBACjCA,OAAkD,IAA1BK,EAAmC,EAAgBA,EAC3E7H,EAAYL,EAAa5J,GACzB6R,EAAa5H,EAAYuH,EAAiB3R,EAAsBA,EAAoB4H,QAAO,SAAUzH,GACvG,OAAO4J,EAAa5J,KAAeiK,CACrC,IAAK3K,EACDyS,EAAoBF,EAAWpK,QAAO,SAAUzH,GAClD,OAAOyR,EAAsBhL,QAAQzG,IAAc,CACrD,IAEiC,IAA7B+R,EAAkBC,SACpBD,EAAoBF,GAQtB,IAAII,EAAYF,EAAkBjS,QAAO,SAAUC,EAAKC,GAOtD,OANAD,EAAIC,GAAakP,GAAejN,EAAO,CACrCjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdjH,QAASA,IACRjF,EAAiBvD,IACbD,CACT,GAAG,CAAC,GACJ,OAAOzB,OAAO4D,KAAK+P,GAAWC,MAAK,SAAUC,EAAGC,GAC9C,OAAOH,EAAUE,GAAKF,EAAUG,EAClC,GACF,CDH6DC,CAAqBpQ,EAAO,CACnFjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdjH,QAASA,EACTgJ,eAAgBA,EAChBC,sBAAuBA,IACpBzR,EACP,GAAG,IACCsS,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzB4S,EAAY,IAAIC,IAChBC,GAAqB,EACrBC,EAAwBb,EAAW,GAE9Bc,EAAI,EAAGA,EAAId,EAAWG,OAAQW,IAAK,CAC1C,IAAI3S,EAAY6R,EAAWc,GAEvBC,EAAiBrP,EAAiBvD,GAElC6S,EAAmBjJ,EAAa5J,KAAeT,EAC/CuT,EAAa,CAAC,EAAK5T,GAAQuH,QAAQmM,IAAmB,EACtDrK,EAAMuK,EAAa,QAAU,SAC7B1F,EAAW8B,GAAejN,EAAO,CACnCjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdI,YAAaA,EACbrH,QAASA,IAEPuK,EAAoBD,EAAaD,EAAmB1T,EAAQC,EAAOyT,EAAmB3T,EAAS,EAE/FoT,EAAc/J,GAAOyB,EAAWzB,KAClCwK,EAAoBvG,GAAqBuG,IAG3C,IAAIC,EAAmBxG,GAAqBuG,GACxCE,EAAS,GAUb,GARIhC,GACFgC,EAAOC,KAAK9F,EAASwF,IAAmB,GAGtCxB,GACF6B,EAAOC,KAAK9F,EAAS2F,IAAsB,EAAG3F,EAAS4F,IAAqB,GAG1EC,EAAOE,OAAM,SAAUC,GACzB,OAAOA,CACT,IAAI,CACFV,EAAwB1S,EACxByS,GAAqB,EACrB,KACF,CAEAF,EAAUc,IAAIrT,EAAWiT,EAC3B,CAEA,GAAIR,EAqBF,IAnBA,IAEIa,EAAQ,SAAeC,GACzB,IAAIC,EAAmB3B,EAAW4B,MAAK,SAAUzT,GAC/C,IAAIiT,EAASV,EAAU9T,IAAIuB,GAE3B,GAAIiT,EACF,OAAOA,EAAOS,MAAM,EAAGH,GAAIJ,OAAM,SAAUC,GACzC,OAAOA,CACT,GAEJ,IAEA,GAAII,EAEF,OADAd,EAAwBc,EACjB,OAEX,EAESD,EAnBY/B,EAAiB,EAAI,EAmBZ+B,EAAK,GAGpB,UAFFD,EAAMC,GADmBA,KAOpCtR,EAAMjC,YAAc0S,IACtBzQ,EAAMmG,cAAcxG,GAAMmP,OAAQ,EAClC9O,EAAMjC,UAAY0S,EAClBzQ,EAAM0R,OAAQ,EA5GhB,CA8GF,EAQEhK,iBAAkB,CAAC,UACnBgC,KAAM,CACJoF,OAAO,IE7IX,SAAS6C,GAAexG,EAAUY,EAAM6F,GAQtC,YAPyB,IAArBA,IACFA,EAAmB,CACjBrO,EAAG,EACHE,EAAG,IAIA,CACLzC,IAAKmK,EAASnK,IAAM+K,EAAK3I,OAASwO,EAAiBnO,EACnDvG,MAAOiO,EAASjO,MAAQ6O,EAAK7I,MAAQ0O,EAAiBrO,EACtDtG,OAAQkO,EAASlO,OAAS8O,EAAK3I,OAASwO,EAAiBnO,EACzDtG,KAAMgO,EAAShO,KAAO4O,EAAK7I,MAAQ0O,EAAiBrO,EAExD,CAEA,SAASsO,GAAsB1G,GAC7B,MAAO,CAAC,EAAKjO,EAAOD,EAAQE,GAAM2U,MAAK,SAAUC,GAC/C,OAAO5G,EAAS4G,IAAS,CAC3B,GACF,CA+BA,UACEpS,KAAM,OACNC,SAAS,EACTC,MAAO,OACP6H,iBAAkB,CAAC,mBACnB5H,GAlCF,SAAcC,GACZ,IAAIC,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KACZ0Q,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzBkU,EAAmB5R,EAAMmG,cAAc6L,gBACvCC,EAAoBhF,GAAejN,EAAO,CAC5C0N,eAAgB,cAEdwE,EAAoBjF,GAAejN,EAAO,CAC5C4N,aAAa,IAEXuE,EAA2BR,GAAeM,EAAmB5B,GAC7D+B,EAAsBT,GAAeO,EAAmBnK,EAAY6J,GACpES,EAAoBR,GAAsBM,GAC1CG,EAAmBT,GAAsBO,GAC7CpS,EAAMmG,cAAcxG,GAAQ,CAC1BwS,yBAA0BA,EAC1BC,oBAAqBA,EACrBC,kBAAmBA
,EACnBC,iBAAkBA,GAEpBtS,EAAMM,WAAW5C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMM,WAAW5C,OAAQ,CACnE,+BAAgC2U,EAChC,sBAAuBC,GAE3B,GCJA,IACE3S,KAAM,SACNC,SAAS,EACTC,MAAO,OACPwB,SAAU,CAAC,iBACXvB,GA5BF,SAAgBa,GACd,IAAIX,EAAQW,EAAMX,MACdc,EAAUH,EAAMG,QAChBnB,EAAOgB,EAAMhB,KACb4S,EAAkBzR,EAAQuG,OAC1BA,OAA6B,IAApBkL,EAA6B,CAAC,EAAG,GAAKA,EAC/C7I,EAAO,UAAkB,SAAU5L,EAAKC,GAE1C,OADAD,EAAIC,GA5BD,SAAiCA,EAAWyI,EAAOa,GACxD,IAAIjB,EAAgB9E,EAAiBvD,GACjCyU,EAAiB,CAACrV,EAAM,GAAKqH,QAAQ4B,IAAkB,GAAK,EAAI,EAEhErG,EAAyB,mBAAXsH,EAAwBA,EAAOhL,OAAOkE,OAAO,CAAC,EAAGiG,EAAO,CACxEzI,UAAWA,KACPsJ,EACFoL,EAAW1S,EAAK,GAChB2S,EAAW3S,EAAK,GAIpB,OAFA0S,EAAWA,GAAY,EACvBC,GAAYA,GAAY,GAAKF,EACtB,CAACrV,EAAMD,GAAOsH,QAAQ4B,IAAkB,EAAI,CACjD7C,EAAGmP,EACHjP,EAAGgP,GACD,CACFlP,EAAGkP,EACHhP,EAAGiP,EAEP,CASqBC,CAAwB5U,EAAWiC,EAAMwG,MAAOa,GAC1DvJ,CACT,GAAG,CAAC,GACA8U,EAAwBlJ,EAAK1J,EAAMjC,WACnCwF,EAAIqP,EAAsBrP,EAC1BE,EAAImP,EAAsBnP,EAEW,MAArCzD,EAAMmG,cAAcD,gBACtBlG,EAAMmG,cAAcD,cAAc3C,GAAKA,EACvCvD,EAAMmG,cAAcD,cAAczC,GAAKA,GAGzCzD,EAAMmG,cAAcxG,GAAQ+J,CAC9B,GC1BA,IACE/J,KAAM,gBACNC,SAAS,EACTC,MAAO,OACPC,GApBF,SAAuBC,GACrB,IAAIC,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KAKhBK,EAAMmG,cAAcxG,GAAQkN,GAAe,CACzClP,UAAWqC,EAAMwG,MAAM7I,UACvBiB,QAASoB,EAAMwG,MAAM9I,OACrBqD,SAAU,WACVhD,UAAWiC,EAAMjC,WAErB,EAQE2L,KAAM,CAAC,GCgHT,IACE/J,KAAM,kBACNC,SAAS,EACTC,MAAO,OACPC,GA/HF,SAAyBC,GACvB,IAAIC,EAAQD,EAAKC,MACbc,EAAUf,EAAKe,QACfnB,EAAOI,EAAKJ,KACZoP,EAAoBjO,EAAQkM,SAC5BgC,OAAsC,IAAtBD,GAAsCA,EACtDE,EAAmBnO,EAAQoO,QAC3BC,OAAoC,IAArBF,GAAsCA,EACrD3B,EAAWxM,EAAQwM,SACnBE,EAAe1M,EAAQ0M,aACvBI,EAAc9M,EAAQ8M,YACtBrH,EAAUzF,EAAQyF,QAClBsM,EAAkB/R,EAAQgS,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7CE,EAAwBjS,EAAQkS,aAChCA,OAAyC,IAA1BD,EAAmC,EAAIA,EACtD5H,EAAW8B,GAAejN,EAAO,CACnCsN,SAAUA,EACVE,aAAcA,EACdjH,QAASA,EACTqH,YAAaA,IAEXxH,EAAgB9E,EAAiBtB,EAAMjC,WACvCiK,EAAYL,EAAa3H,EAAMjC,WAC/BkV,GAAmBjL,EACnBgF,EAAWtH,EAAyBU,GACpC8I,ECrCY,MDqCSlC,ECrCH,IAAM,IDsCxB9G,EAAgBlG,EAAMmG,cAAcD,cACpCmK,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzBwV,EAA4C,mBAAjBF,EAA8BA,EAAa3W,OAAOkE,OAAO,CAAC,EAAGP,EAAMwG,MAAO,CACvGzI,UAAWiC,EAAMjC,aACbiV,EACFG,EAA2D,iBAAtBD,EAAiC,CACxElG,SAAUkG,EACVhE,QAASgE,GACP7W,OAAOkE,OAAO,CAChByM,SAAU,EACVkC,QAAS,GACRgE,GACCE,EAAsBpT,EAAMmG,cAAckB,OAASrH,EAAMmG,cAAckB,OAAOrH,EAAMjC,WAAa,KACjG2L,EAAO,CACTnG,EAAG,EACHE,EAAG,GAGL,GAAKyC,EAAL,CAIA,GAAI8I,EAAe,CACjB,IAAIqE,EAEAC,EAAwB,MAAbtG,EAAmB,EAAM7P,EACpCoW,EAAuB,MAAbvG,EAAmB/P,EAASC,EACtCoJ,EAAmB,MAAb0G,EAAmB,SAAW,QACpC3F,EAASnB,EAAc8G,GACvBtL,EAAM2F,EAAS8D,EAASmI,GACxB7R,EAAM4F,EAAS8D,EAASoI,GACxBC,EAAWV,GAAU/K,EAAWzB,GAAO,EAAI,EAC3CmN,EAASzL,IAAc1K,EAAQ+S,EAAc/J,GAAOyB,EAAWzB,GAC/DoN,EAAS1L,IAAc1K,GAASyK,EAAWzB,IAAQ+J,EAAc/J,GAGjEL,EAAejG,EAAME,SAASgB,MAC9BwF,EAAYoM,GAAU7M,EAAetC,EAAcsC,GAAgB,CACrE/C,MAAO,EACPE,OAAQ,GAENuQ,GAAqB3T,EAAMmG,cAAc,oBAAsBnG,EAAMmG,cAAc,oBAAoBI,QxBhFtG,CACLvF,IAAK,EACL9D,MAAO,EACPD,OAAQ,EACRE,KAAM,GwB6EFyW,GAAkBD,GAAmBL,GACrCO,GAAkBF,GAAmBJ,GAMrCO,GAAWnO,EAAO,EAAG0K,EAAc/J,GAAMI,EAAUJ,IACnDyN,GAAYd,EAAkB5C,EAAc/J,GAAO,EAAIkN,EAAWM,GAAWF,GAAkBT,EAA4BnG,SAAWyG,EAASK,GAAWF,GAAkBT,EAA4BnG,SACxMgH,GAAYf,GAAmB5C,EAAc/J,GAAO,EAAIkN,EAAWM,GAAWD,GAAkBV,EAA4BnG,SAAW0G,EAASI,GAAWD,GAAkBV,EAA4BnG,SACzMjG,GAAoB/G,EAAME,SAASgB,OAAS8D,EAAgBhF,EAAME,SAASgB,OAC3E+S,GAAelN,GAAiC,MAAbiG,EAAmBjG,GAAkBsF,WAAa,EAAItF,GAAkBuF,YAAc,EAAI,EAC7H4H,GAAwH,OAAjGb,EAA+C,MAAvBD,OAA8B,EAASA,EAAoBpG,IAAqBqG,EAAwB,EAEvJc,GAAY9M,EAAS2M,GAAYE,GACjCE,GAAkBzO,EAAOmN,EAAS,EAAQpR,EAF9B2F,EAAS0M,GAAYG,GAAsBD,IAEKvS,EAAK2F,EAAQyL,EAAS,EAAQrR,EAAK0S,IAAa1S,GAChHyE,EAAc8G,GAAYoH,GAC1B1K,EAAKsD,GAAYoH,GAAkB/M,CACrC,CAEA
,GAAI8H,EAAc,CAChB,IAAIkF,GAEAC,GAAyB,MAAbtH,EAAmB,EAAM7P,EAErCoX,GAAwB,MAAbvH,EAAmB/P,EAASC,EAEvCsX,GAAUtO,EAAcgJ,GAExBuF,GAAmB,MAAZvF,EAAkB,SAAW,QAEpCwF,GAAOF,GAAUrJ,EAASmJ,IAE1BK,GAAOH,GAAUrJ,EAASoJ,IAE1BK,IAAuD,IAAxC,CAAC,EAAKzX,GAAMqH,QAAQ4B,GAEnCyO,GAAyH,OAAjGR,GAAgD,MAAvBjB,OAA8B,EAASA,EAAoBlE,IAAoBmF,GAAyB,EAEzJS,GAAaF,GAAeF,GAAOF,GAAUnE,EAAcoE,IAAQ1M,EAAW0M,IAAQI,GAAuB1B,EAA4BjE,QAEzI6F,GAAaH,GAAeJ,GAAUnE,EAAcoE,IAAQ1M,EAAW0M,IAAQI,GAAuB1B,EAA4BjE,QAAUyF,GAE5IK,GAAmBlC,GAAU8B,G1BzH9B,SAAwBlT,EAAK1E,EAAOyE,GACzC,IAAIwT,EAAItP,EAAOjE,EAAK1E,EAAOyE,GAC3B,OAAOwT,EAAIxT,EAAMA,EAAMwT,CACzB,C0BsHoDC,CAAeJ,GAAYN,GAASO,IAAcpP,EAAOmN,EAASgC,GAAaJ,GAAMF,GAAS1B,EAASiC,GAAaJ,IAEpKzO,EAAcgJ,GAAW8F,GACzBtL,EAAKwF,GAAW8F,GAAmBR,EACrC,CAEAxU,EAAMmG,cAAcxG,GAAQ+J,CAvE5B,CAwEF,EAQEhC,iBAAkB,CAAC,WE1HN,SAASyN,GAAiBC,EAAyBrQ,EAAcsD,QAC9D,IAAZA,IACFA,GAAU,GAGZ,ICnBoCrJ,ECJOJ,EFuBvCyW,EAA0B9V,EAAcwF,GACxCuQ,EAAuB/V,EAAcwF,IAf3C,SAAyBnG,GACvB,IAAImN,EAAOnN,EAAQ+D,wBACfI,EAASpB,EAAMoK,EAAK7I,OAAStE,EAAQqE,aAAe,EACpDD,EAASrB,EAAMoK,EAAK3I,QAAUxE,EAAQuE,cAAgB,EAC1D,OAAkB,IAAXJ,GAA2B,IAAXC,CACzB,CAU4DuS,CAAgBxQ,GACtEJ,EAAkBF,EAAmBM,GACrCgH,EAAOpJ,EAAsByS,EAAyBE,EAAsBjN,GAC5EyB,EAAS,CACXc,WAAY,EACZE,UAAW,GAET7C,EAAU,CACZ1E,EAAG,EACHE,EAAG,GAkBL,OAfI4R,IAA4BA,IAA4BhN,MACxB,SAA9B1J,EAAYoG,IAChBkG,GAAetG,MACbmF,GCnCgC9K,EDmCT+F,KClCdhG,EAAUC,IAAUO,EAAcP,GCJxC,CACL4L,YAFyChM,EDQbI,GCNR4L,WACpBE,UAAWlM,EAAQkM,WDGZH,GAAgB3L,IDoCnBO,EAAcwF,KAChBkD,EAAUtF,EAAsBoC,GAAc,IACtCxB,GAAKwB,EAAauH,WAC1BrE,EAAQxE,GAAKsB,EAAasH,WACjB1H,IACTsD,EAAQ1E,EAAIyH,GAAoBrG,KAI7B,CACLpB,EAAGwI,EAAK5O,KAAO2M,EAAOc,WAAa3C,EAAQ1E,EAC3CE,EAAGsI,EAAK/K,IAAM8I,EAAOgB,UAAY7C,EAAQxE,EACzCP,MAAO6I,EAAK7I,MACZE,OAAQ2I,EAAK3I,OAEjB,CGvDA,SAASoS,GAAMC,GACb,IAAItT,EAAM,IAAIoO,IACVmF,EAAU,IAAIC,IACdC,EAAS,GAKb,SAAS3F,EAAK4F,GACZH,EAAQI,IAAID,EAASlW,MACN,GAAG3B,OAAO6X,EAASxU,UAAY,GAAIwU,EAASnO,kBAAoB,IACtEvH,SAAQ,SAAU4V,GACzB,IAAKL,EAAQM,IAAID,GAAM,CACrB,IAAIE,EAAc9T,EAAI3F,IAAIuZ,GAEtBE,GACFhG,EAAKgG,EAET,CACF,IACAL,EAAO3E,KAAK4E,EACd,CAQA,OAzBAJ,EAAUtV,SAAQ,SAAU0V,GAC1B1T,EAAIiP,IAAIyE,EAASlW,KAAMkW,EACzB,IAiBAJ,EAAUtV,SAAQ,SAAU0V,GACrBH,EAAQM,IAAIH,EAASlW,OAExBsQ,EAAK4F,EAET,IACOD,CACT,CClBA,IAEIM,GAAkB,CACpBnY,UAAW,SACX0X,UAAW,GACX1U,SAAU,YAGZ,SAASoV,KACP,IAAK,IAAI1B,EAAO2B,UAAUrG,OAAQsG,EAAO,IAAIpU,MAAMwS,GAAO6B,EAAO,EAAGA,EAAO7B,EAAM6B,IAC/ED,EAAKC,GAAQF,UAAUE,GAGzB,OAAQD,EAAKvE,MAAK,SAAUlT,GAC1B,QAASA,GAAoD,mBAAlCA,EAAQ+D,sBACrC,GACF,CAEO,SAAS4T,GAAgBC,QACL,IAArBA,IACFA,EAAmB,CAAC,GAGtB,IAAIC,EAAoBD,EACpBE,EAAwBD,EAAkBE,iBAC1CA,OAA6C,IAA1BD,EAAmC,GAAKA,EAC3DE,EAAyBH,EAAkBI,eAC3CA,OAA4C,IAA3BD,EAAoCV,GAAkBU,EAC3E,OAAO,SAAsBjZ,EAAWD,EAAQoD,QAC9B,IAAZA,IACFA,EAAU+V,GAGZ,IC/C6B/W,EAC3BgX,ED8CE9W,EAAQ,CACVjC,UAAW,SACXgZ,iBAAkB,GAClBjW,QAASzE,OAAOkE,OAAO,CAAC,EAAG2V,GAAiBW,GAC5C1Q,cAAe,CAAC,EAChBjG,SAAU,CACRvC,UAAWA,EACXD,OAAQA,GAEV4C,WAAY,CAAC,EACbD,OAAQ,CAAC,GAEP2W,EAAmB,GACnBC,GAAc,EACdrN,EAAW,CACb5J,MAAOA,EACPkX,WAAY,SAAoBC,GAC9B,IAAIrW,EAAsC,mBAArBqW,EAAkCA,EAAiBnX,EAAMc,SAAWqW,EACzFC,IACApX,EAAMc,QAAUzE,OAAOkE,OAAO,CAAC,EAAGsW,EAAgB7W,EAAMc,QAASA,GACjEd,EAAMiK,cAAgB,CACpBtM,UAAW0B,EAAU1B,GAAa6N,GAAkB7N,GAAaA,EAAU4Q,eAAiB/C,GAAkB7N,EAAU4Q,gBAAkB,GAC1I7Q,OAAQ8N,GAAkB9N,IAI5B,IEzE4B+X,EAC9B4B,EFwEMN,EDvCG,SAAwBtB,GAErC,IAAIsB,EAAmBvB,GAAMC,GAE7B,OAAO/W,EAAeb,QAAO,SAAUC,EAAK+B,GAC1C,OAAO/B,EAAIE,OAAO+Y,EAAiBvR,QAAO,SAAUqQ,GAClD,OAAOA,EAAShW,QAAUA,CAC5B,IACF,GAAG,GACL,CC8B+ByX,EEzEK7B,EFyEsB,GAAGzX,OAAO2Y,EAAkB3W,EAAMc,QAAQ2U,WExE9F4B,EAAS5B,EAAU5X,QAAO,SAAUwZ,EAAQE,GAC9C,IAAIC,EAAWH,EAAOE,EAAQ5X,MAK9B,OAJA0X,EAAOE,EAAQ5X,MAAQ6X,EAAWnb,OAAOkE,OAAO,CA
AC,EAAGiX,EAAUD,EAAS,CACrEzW,QAASzE,OAAOkE,OAAO,CAAC,EAAGiX,EAAS1W,QAASyW,EAAQzW,SACrD4I,KAAMrN,OAAOkE,OAAO,CAAC,EAAGiX,EAAS9N,KAAM6N,EAAQ7N,QAC5C6N,EACEF,CACT,GAAG,CAAC,GAEGhb,OAAO4D,KAAKoX,GAAQlV,KAAI,SAAUhG,GACvC,OAAOkb,EAAOlb,EAChB,MFsGM,OAvCA6D,EAAM+W,iBAAmBA,EAAiBvR,QAAO,SAAUiS,GACzD,OAAOA,EAAE7X,OACX,IAoJFI,EAAM+W,iBAAiB5W,SAAQ,SAAUqI,GACvC,IAAI7I,EAAO6I,EAAM7I,KACb+X,EAAgBlP,EAAM1H,QACtBA,OAA4B,IAAlB4W,EAA2B,CAAC,EAAIA,EAC1ChX,EAAS8H,EAAM9H,OAEnB,GAAsB,mBAAXA,EAAuB,CAChC,IAAIiX,EAAYjX,EAAO,CACrBV,MAAOA,EACPL,KAAMA,EACNiK,SAAUA,EACV9I,QAASA,IAKXkW,EAAiB/F,KAAK0G,GAFT,WAAmB,EAGlC,CACF,IAjIS/N,EAASQ,QAClB,EAMAwN,YAAa,WACX,IAAIX,EAAJ,CAIA,IAAIY,EAAkB7X,EAAME,SACxBvC,EAAYka,EAAgBla,UAC5BD,EAASma,EAAgBna,OAG7B,GAAKyY,GAAiBxY,EAAWD,GAAjC,CASAsC,EAAMwG,MAAQ,CACZ7I,UAAWwX,GAAiBxX,EAAWqH,EAAgBtH,GAAoC,UAA3BsC,EAAMc,QAAQC,UAC9ErD,OAAQiG,EAAcjG,IAOxBsC,EAAM0R,OAAQ,EACd1R,EAAMjC,UAAYiC,EAAMc,QAAQ/C,UAKhCiC,EAAM+W,iBAAiB5W,SAAQ,SAAU0V,GACvC,OAAO7V,EAAMmG,cAAc0P,EAASlW,MAAQtD,OAAOkE,OAAO,CAAC,EAAGsV,EAASnM,KACzE,IAGA,IAFA,IAESoO,EAAQ,EAAGA,EAAQ9X,EAAM+W,iBAAiBhH,OAAQ+H,IAUzD,IAAoB,IAAhB9X,EAAM0R,MAAV,CAMA,IAAIqG,EAAwB/X,EAAM+W,iBAAiBe,GAC/ChY,EAAKiY,EAAsBjY,GAC3BkY,EAAyBD,EAAsBjX,QAC/CoM,OAAsC,IAA3B8K,EAAoC,CAAC,EAAIA,EACpDrY,EAAOoY,EAAsBpY,KAEf,mBAAPG,IACTE,EAAQF,EAAG,CACTE,MAAOA,EACPc,QAASoM,EACTvN,KAAMA,EACNiK,SAAUA,KACN5J,EAdR,MAHEA,EAAM0R,OAAQ,EACdoG,GAAS,CAnCb,CAbA,CAmEF,EAGA1N,QClM2BtK,EDkMV,WACf,OAAO,IAAImY,SAAQ,SAAUC,GAC3BtO,EAASgO,cACTM,EAAQlY,EACV,GACF,ECrMG,WAUL,OATK8W,IACHA,EAAU,IAAImB,SAAQ,SAAUC,GAC9BD,QAAQC,UAAUC,MAAK,WACrBrB,OAAUsB,EACVF,EAAQpY,IACV,GACF,KAGKgX,CACT,GD2LIuB,QAAS,WACPjB,IACAH,GAAc,CAChB,GAGF,IAAKd,GAAiBxY,EAAWD,GAK/B,OAAOkM,EAmCT,SAASwN,IACPJ,EAAiB7W,SAAQ,SAAUL,GACjC,OAAOA,GACT,IACAkX,EAAmB,EACrB,CAEA,OAvCApN,EAASsN,WAAWpW,GAASqX,MAAK,SAAUnY,IACrCiX,GAAenW,EAAQwX,eAC1BxX,EAAQwX,cAActY,EAE1B,IAmCO4J,CACT,CACF,CACO,IAAI2O,GAA4BhC,KGrPnC,GAA4BA,GAAgB,CAC9CI,iBAFqB,CAAC6B,GAAgB,GAAe,GAAe,EAAa,GAAQ,GAAM,GAAiB,EAAO,MCJrH,GAA4BjC,GAAgB,CAC9CI,iBAFqB,CAAC6B,GAAgB,GAAe,GAAe,KCQtE,MAEMC,GAAiB,gBAsBjBC,GAAc9Z,IAClB,IAAI+Z,EAAW/Z,EAAQga,aAAa,kBAEpC,IAAKD,GAAyB,MAAbA,EAAkB,CACjC,IAAIE,EAAgBja,EAAQga,aAAa,QAKzC,IAAKC,IAAkBA,EAAcC,SAAS,OAASD,EAAcE,WAAW,KAC9E,OAAO,KAILF,EAAcC,SAAS,OAASD,EAAcE,WAAW,OAC3DF,EAAgB,IAAIA,EAActX,MAAM,KAAK,MAG/CoX,EAAWE,GAAmC,MAAlBA,EAAwBA,EAAcG,OAAS,IAC7E,CAEA,OAAOL,CAAQ,EAGXM,GAAyBra,IAC7B,MAAM+Z,EAAWD,GAAY9Z,GAE7B,OAAI+Z,GACKjU,SAAS+C,cAAckR,GAAYA,EAGrC,IAAI,EAGPO,GAAyBta,IAC7B,MAAM+Z,EAAWD,GAAY9Z,GAC7B,OAAO+Z,EAAWjU,SAAS+C,cAAckR,GAAY,IAAI,EA0BrDQ,GAAuBva,IAC3BA,EAAQwa,cAAc,IAAIC,MAAMZ,IAAgB,EAG5C,GAAYa,MACXA,GAA4B,iBAAXA,UAIO,IAAlBA,EAAOC,SAChBD,EAASA,EAAO,SAGgB,IAApBA,EAAOE,UAGjBC,GAAaH,GAEb,GAAUA,GACLA,EAAOC,OAASD,EAAO,GAAKA,EAGf,iBAAXA,GAAuBA,EAAOvJ,OAAS,EACzCrL,SAAS+C,cAAc6R,GAGzB,KAGHI,GAAY9a,IAChB,IAAK,GAAUA,IAAgD,IAApCA,EAAQ+a,iBAAiB5J,OAClD,OAAO,EAGT,MAAM6J,EAAgF,YAA7DtV,iBAAiB1F,GAASib,iBAAiB,cAE9DC,EAAgBlb,EAAQmb,QAAQ,uBAEtC,IAAKD,EACH,OAAOF,EAGT,GAAIE,IAAkBlb,EAAS,CAC7B,MAAMob,EAAUpb,EAAQmb,QAAQ,WAEhC,GAAIC,GAAWA,EAAQ5V,aAAe0V,EACpC,OAAO,EAGT,GAAgB,OAAZE,EACF,OAAO,CAEX,CAEA,OAAOJ,CAAgB,EAGnBK,GAAarb,IACZA,GAAWA,EAAQ4a,WAAaU,KAAKC,gBAItCvb,EAAQwb,UAAUvW,SAAS,mBAIC,IAArBjF,EAAQyb,SACVzb,EAAQyb,SAGVzb,EAAQ0b,aAAa,aAAoD,UAArC1b,EAAQga,aAAa,aAG5D2B,GAAiB3b,IACrB,IAAK8F,SAASC,gBAAgB6V,aAC5B,OAAO,KAIT,GAAmC,mBAAxB5b,EAAQqF,YAA4B,CAC7C,MAAMwW,EAAO7b,EAAQqF,cACrB,OAAOwW,aAAgB/a,WAAa+a,EAAO,IAC7C,CAEA,OAAI7b,aAAmBc,WACdd,EAIJA,EAAQwF,WAINmW,GAAe3b,EAAQwF,YAHrB,IAGgC,EAGrCsW,GAAO,OAWPC,GAAS/b,IACbA,EAAQuE,YAAY,EAGhByX,GAAY,IACZ3b,OAAO4b,SAAWnW,SAAS6G
,KAAK+O,aAAa,qBACxCrb,OAAO4b,OAGT,KAGHC,GAA4B,GAmB5BC,GAAQ,IAAuC,QAAjCrW,SAASC,gBAAgBqW,IAEvCC,GAAqBC,IAnBAC,QAoBN,KACjB,MAAMC,EAAIR,KAGV,GAAIQ,EAAG,CACL,MAAMzb,EAAOub,EAAOG,KACdC,EAAqBF,EAAEtb,GAAGH,GAChCyb,EAAEtb,GAAGH,GAAQub,EAAOK,gBACpBH,EAAEtb,GAAGH,GAAM6b,YAAcN,EAEzBE,EAAEtb,GAAGH,GAAM8b,WAAa,KACtBL,EAAEtb,GAAGH,GAAQ2b,EACNJ,EAAOK,gBAElB,GAjC0B,YAAxB7W,SAASgX,YAENZ,GAA0B/K,QAC7BrL,SAASyF,iBAAiB,oBAAoB,KAC5C,IAAK,MAAMgR,KAAYL,GACrBK,GACF,IAIJL,GAA0B7J,KAAKkK,IAE/BA,GAsBA,EAGEQ,GAAUR,IACU,mBAAbA,GACTA,GACF,EAGIS,GAAyB,CAACT,EAAUU,EAAmBC,GAAoB,KAC/E,IAAKA,EAEH,YADAH,GAAQR,GAIV,MACMY,EAnMiCnd,KACvC,IAAKA,EACH,OAAO,EAIT,IAAI,mBACFod,EAAkB,gBAClBC,GACEhd,OAAOqF,iBAAiB1F,GAC5B,MAAMsd,EAA0BC,OAAOC,WAAWJ,GAC5CK,EAAuBF,OAAOC,WAAWH,GAE/C,OAAKC,GAA4BG,GAKjCL,EAAqBA,EAAmBza,MAAM,KAAK,GACnD0a,EAAkBA,EAAgB1a,MAAM,KAAK,GAjFf,KAkFtB4a,OAAOC,WAAWJ,GAAsBG,OAAOC,WAAWH,KANzD,CAMoG,EA+KpFK,CAAiCT,GADlC,EAExB,IAAIU,GAAS,EAEb,MAAMC,EAAU,EACd5Q,aAEIA,IAAWiQ,IAIfU,GAAS,EACTV,EAAkBxR,oBAAoBoO,GAAgB+D,GACtDb,GAAQR,GAAS,EAGnBU,EAAkB1R,iBAAiBsO,GAAgB+D,GACnDC,YAAW,KACJF,GACHpD,GAAqB0C,EACvB,GACCE,EAAiB,EAahBW,GAAuB,CAACjR,EAAMkR,EAAeC,EAAeC,KAChE,MAAMC,EAAarR,EAAKsE,OACxB,IAAI+H,EAAQrM,EAAKjH,QAAQmY,GAGzB,OAAe,IAAX7E,GACM8E,GAAiBC,EAAiBpR,EAAKqR,EAAa,GAAKrR,EAAK,IAGxEqM,GAAS8E,EAAgB,GAAK,EAE1BC,IACF/E,GAASA,EAAQgF,GAAcA,GAG1BrR,EAAKjK,KAAKC,IAAI,EAAGD,KAAKE,IAAIoW,EAAOgF,EAAa,KAAI,EAarDC,GAAiB,qBACjBC,GAAiB,OACjBC,GAAgB,SAChBC,GAAgB,CAAC,EAEvB,IAAIC,GAAW,EACf,MAAMC,GAAe,CACnBC,WAAY,YACZC,WAAY,YAERC,GAAe,IAAI5H,IAAI,CAAC,QAAS,WAAY,UAAW,YAAa,cAAe,aAAc,iBAAkB,YAAa,WAAY,YAAa,cAAe,YAAa,UAAW,WAAY,QAAS,oBAAqB,aAAc,YAAa,WAAY,cAAe,cAAe,cAAe,YAAa,eAAgB,gBAAiB,eAAgB,gBAAiB,aAAc,QAAS,OAAQ,SAAU,QAAS,SAAU,SAAU,UAAW,WAAY,OAAQ,SAAU,eAAgB,SAAU,OAAQ,mBAAoB,mBAAoB,QAAS,QAAS,WAK/lB,SAAS6H,GAAa5e,EAAS6e,GAC7B,OAAOA,GAAO,GAAGA,MAAQN,QAAgBve,EAAQue,UAAYA,IAC/D,CAEA,SAASO,GAAiB9e,GACxB,MAAM6e,EAAMD,GAAa5e,GAGzB,OAFAA,EAAQue,SAAWM,EACnBP,GAAcO,GAAOP,GAAcO,IAAQ,CAAC,EACrCP,GAAcO,EACvB,CA0CA,SAASE,GAAYC,EAAQC,EAAUC,EAAqB,MAC1D,OAAOzhB,OAAO0hB,OAAOH,GAAQpM,MAAKwM,GAASA,EAAMH,WAAaA,GAAYG,EAAMF,qBAAuBA,GACzG,CAEA,SAASG,GAAoBC,EAAmB1B,EAAS2B,GACvD,MAAMC,EAAiC,iBAAZ5B,EAErBqB,EAAWO,EAAcD,EAAqB3B,GAAW2B,EAC/D,IAAIE,EAAYC,GAAaJ,GAM7B,OAJKX,GAAavH,IAAIqI,KACpBA,EAAYH,GAGP,CAACE,EAAaP,EAAUQ,EACjC,CAEA,SAASE,GAAW3f,EAASsf,EAAmB1B,EAAS2B,EAAoBK,GAC3E,GAAiC,iBAAtBN,IAAmCtf,EAC5C,OAGF,IAAKwf,EAAaP,EAAUQ,GAAaJ,GAAoBC,EAAmB1B,EAAS2B,GAGzF,GAAID,KAAqBd,GAAc,CACrC,MAAMqB,EAAe3e,GACZ,SAAUke,GACf,IAAKA,EAAMU,eAAiBV,EAAMU,gBAAkBV,EAAMW,iBAAmBX,EAAMW,eAAe9a,SAASma,EAAMU,eAC/G,OAAO5e,EAAGjD,KAAK+hB,KAAMZ,EAEzB,EAGFH,EAAWY,EAAaZ,EAC1B,CAEA,MAAMD,EAASF,GAAiB9e,GAC1BigB,EAAWjB,EAAOS,KAAeT,EAAOS,GAAa,CAAC,GACtDS,EAAmBnB,GAAYkB,EAAUhB,EAAUO,EAAc5B,EAAU,MAEjF,GAAIsC,EAEF,YADAA,EAAiBN,OAASM,EAAiBN,QAAUA,GAIvD,MAAMf,EAAMD,GAAaK,EAAUK,EAAkB1T,QAAQuS,GAAgB,KACvEjd,EAAKse,EAzEb,SAAoCxf,EAAS+Z,EAAU7Y,GACrD,OAAO,SAAS0c,EAAQwB,GACtB,MAAMe,EAAcngB,EAAQogB,iBAAiBrG,GAE7C,IAAK,IAAI,OACP/M,GACEoS,EAAOpS,GAAUA,IAAWgT,KAAMhT,EAASA,EAAOxH,WACpD,IAAK,MAAM6a,KAAcF,EACvB,GAAIE,IAAerT,EAYnB,OARAsT,GAAWlB,EAAO,CAChBW,eAAgB/S,IAGd4Q,EAAQgC,QACVW,GAAaC,IAAIxgB,EAASof,EAAMqB,KAAM1G,EAAU7Y,GAG3CA,EAAGwf,MAAM1T,EAAQ,CAACoS,GAG/B,CACF,CAiD2BuB,CAA2B3gB,EAAS4d,EAASqB,GAvFxE,SAA0Bjf,EAASkB,GACjC,OAAO,SAAS0c,EAAQwB,GAStB,OARAkB,GAAWlB,EAAO,CAChBW,eAAgB/f,IAGd4d,EAAQgC,QACVW,GAAaC,IAAIxgB,EAASof,EAAMqB,KAAMvf,GAGjCA,EAAGwf,MAAM1gB,EAAS,CAACof,GAC5B,CACF,CA2EoFwB,CAAiB5gB,EAASif,GAC5G/d,EAAGge,mBAAqBM,EAAc5B,EAAU,KAChD1c,EAAG+d,SAAWA,EACd/d,EAAG0e,OAASA,EACZ1e,EAAGqd,SAAWM,EACdoB,EAASpB,GAAO3d,EACh
BlB,EAAQuL,iBAAiBkU,EAAWve,EAAIse,EAC1C,CAEA,SAASqB,GAAc7gB,EAASgf,EAAQS,EAAW7B,EAASsB,GAC1D,MAAMhe,EAAK6d,GAAYC,EAAOS,GAAY7B,EAASsB,GAE9Che,IAILlB,EAAQyL,oBAAoBgU,EAAWve,EAAI4f,QAAQ5B,WAC5CF,EAAOS,GAAWve,EAAGqd,UAC9B,CAEA,SAASwC,GAAyB/gB,EAASgf,EAAQS,EAAWuB,GAC5D,MAAMC,EAAoBjC,EAAOS,IAAc,CAAC,EAEhD,IAAK,MAAMyB,KAAczjB,OAAO4D,KAAK4f,GACnC,GAAIC,EAAWhH,SAAS8G,GAAY,CAClC,MAAM5B,EAAQ6B,EAAkBC,GAChCL,GAAc7gB,EAASgf,EAAQS,EAAWL,EAAMH,SAAUG,EAAMF,mBAClE,CAEJ,CAEA,SAASQ,GAAaN,GAGpB,OADAA,EAAQA,EAAMxT,QAAQwS,GAAgB,IAC/BI,GAAaY,IAAUA,CAChC,CAEA,MAAMmB,GAAe,CACnBY,GAAGnhB,EAASof,EAAOxB,EAAS2B,GAC1BI,GAAW3f,EAASof,EAAOxB,EAAS2B,GAAoB,EAC1D,EAEA6B,IAAIphB,EAASof,EAAOxB,EAAS2B,GAC3BI,GAAW3f,EAASof,EAAOxB,EAAS2B,GAAoB,EAC1D,EAEAiB,IAAIxgB,EAASsf,EAAmB1B,EAAS2B,GACvC,GAAiC,iBAAtBD,IAAmCtf,EAC5C,OAGF,MAAOwf,EAAaP,EAAUQ,GAAaJ,GAAoBC,EAAmB1B,EAAS2B,GACrF8B,EAAc5B,IAAcH,EAC5BN,EAASF,GAAiB9e,GAC1BihB,EAAoBjC,EAAOS,IAAc,CAAC,EAC1C6B,EAAchC,EAAkBnF,WAAW,KAEjD,QAAwB,IAAb8E,EAAX,CAUA,GAAIqC,EACF,IAAK,MAAMC,KAAgB9jB,OAAO4D,KAAK2d,GACrC+B,GAAyB/gB,EAASgf,EAAQuC,EAAcjC,EAAkBzM,MAAM,IAIpF,IAAK,MAAM2O,KAAe/jB,OAAO4D,KAAK4f,GAAoB,CACxD,MAAMC,EAAaM,EAAY5V,QAAQyS,GAAe,IAEtD,IAAKgD,GAAe/B,EAAkBpF,SAASgH,GAAa,CAC1D,MAAM9B,EAAQ6B,EAAkBO,GAChCX,GAAc7gB,EAASgf,EAAQS,EAAWL,EAAMH,SAAUG,EAAMF,mBAClE,CACF,CAfA,KARA,CAEE,IAAKzhB,OAAO4D,KAAK4f,GAAmB9P,OAClC,OAGF0P,GAAc7gB,EAASgf,EAAQS,EAAWR,EAAUO,EAAc5B,EAAU,KAE9E,CAgBF,EAEA6D,QAAQzhB,EAASof,EAAO3H,GACtB,GAAqB,iBAAV2H,IAAuBpf,EAChC,OAAO,KAGT,MAAMwc,EAAIR,KAGV,IAAI0F,EAAc,KACdC,GAAU,EACVC,GAAiB,EACjBC,GAAmB,EAJHzC,IADFM,GAAaN,IAOZ5C,IACjBkF,EAAclF,EAAE/B,MAAM2E,EAAO3H,GAC7B+E,EAAExc,GAASyhB,QAAQC,GACnBC,GAAWD,EAAYI,uBACvBF,GAAkBF,EAAYK,gCAC9BF,EAAmBH,EAAYM,sBAGjC,IAAIC,EAAM,IAAIxH,MAAM2E,EAAO,CACzBuC,UACAO,YAAY,IAgBd,OAdAD,EAAM3B,GAAW2B,EAAKxK,GAElBoK,GACFI,EAAIE,iBAGFP,GACF5hB,EAAQwa,cAAcyH,GAGpBA,EAAIJ,kBAAoBH,GAC1BA,EAAYS,iBAGPF,CACT,GAIF,SAAS3B,GAAWziB,EAAKukB,GACvB,IAAK,MAAO7kB,EAAKa,KAAUX,OAAO4kB,QAAQD,GAAQ,CAAC,GACjD,IACEvkB,EAAIN,GAAOa,CACb,CAAE,MAAOkkB,GACP7kB,OAAOC,eAAeG,EAAKN,EAAK,CAC9BglB,cAAc,EAEd3kB,IAAG,IACMQ,GAIb,CAGF,OAAOP,CACT,CAYA,MAAM2kB,GAAa,IAAI7Q,IACjB8Q,GAAO,CACXjQ,IAAIxS,EAASzC,EAAKyN,GACXwX,GAAWpL,IAAIpX,IAClBwiB,GAAWhQ,IAAIxS,EAAS,IAAI2R,KAG9B,MAAM+Q,EAAcF,GAAW5kB,IAAIoC,GAG9B0iB,EAAYtL,IAAI7Z,IAA6B,IAArBmlB,EAAYC,KAMzCD,EAAYlQ,IAAIjV,EAAKyN,GAJnB4X,QAAQC,MAAM,+EAA+Exf,MAAMyf,KAAKJ,EAAYrhB,QAAQ,MAKhI,EAEAzD,IAAG,CAACoC,EAASzC,IACPilB,GAAWpL,IAAIpX,IACVwiB,GAAW5kB,IAAIoC,GAASpC,IAAIL,IAG9B,KAGTwlB,OAAO/iB,EAASzC,GACd,IAAKilB,GAAWpL,IAAIpX,GAClB,OAGF,MAAM0iB,EAAcF,GAAW5kB,IAAIoC,GACnC0iB,EAAYM,OAAOzlB,GAEM,IAArBmlB,EAAYC,MACdH,GAAWQ,OAAOhjB,EAEtB,GAUF,SAASijB,GAAc7kB,GACrB,GAAc,SAAVA,EACF,OAAO,EAGT,GAAc,UAAVA,EACF,OAAO,EAGT,GAAIA,IAAUmf,OAAOnf,GAAOkC,WAC1B,OAAOid,OAAOnf,GAGhB,GAAc,KAAVA,GAA0B,SAAVA,EAClB,OAAO,KAGT,GAAqB,iBAAVA,EACT,OAAOA,EAGT,IACE,OAAO8kB,KAAKC,MAAMC,mBAAmBhlB,GACvC,CAAE,MAAOkkB,GACP,OAAOlkB,CACT,CACF,CAEA,SAASilB,GAAiB9lB,GACxB,OAAOA,EAAIqO,QAAQ,UAAU0X,GAAO,IAAIA,EAAIpjB,iBAC9C,CAEA,MAAMqjB,GAAc,CAClBC,iBAAiBxjB,EAASzC,EAAKa,GAC7B4B,EAAQ6B,aAAa,WAAWwhB,GAAiB9lB,KAAQa,EAC3D,EAEAqlB,oBAAoBzjB,EAASzC,GAC3ByC,EAAQ4B,gBAAgB,WAAWyhB,GAAiB9lB,KACtD,EAEAmmB,kBAAkB1jB,GAChB,IAAKA,EACH,MAAO,CAAC,EAGV,MAAM0B,EAAa,CAAC,EACdiiB,EAASlmB,OAAO4D,KAAKrB,EAAQ4jB,SAAShd,QAAOrJ,GAAOA,EAAI4c,WAAW,QAAU5c,EAAI4c,WAAW,cAElG,IAAK,MAAM5c,KAAOomB,EAAQ,CACxB,IAAIE,EAAUtmB,EAAIqO,QAAQ,MAAO,IACjCiY,EAAUA,EAAQC,OAAO,GAAG5jB,cAAgB2jB,EAAQhR,MAAM,EAAGgR,EAAQ1S,QACrEzP,EAAWmiB,GAAWZ,GAAcjjB,EAAQ4jB,QAAQrmB,GACtD,CAEA,OAAOmE,CACT,EAEAqiB,iBAAgB,CAAC/jB,EAASzC,IACjB0lB,GAAcjjB
,EAAQga,aAAa,WAAWqJ,GAAiB9lB,QAe1E,MAAMymB,GAEOC,qBACT,MAAO,CAAC,CACV,CAEWC,yBACT,MAAO,CAAC,CACV,CAEWzH,kBACT,MAAM,IAAI0H,MAAM,sEAClB,CAEAC,WAAWC,GAMT,OALAA,EAASrE,KAAKsE,gBAAgBD,GAC9BA,EAASrE,KAAKuE,kBAAkBF,GAEhCrE,KAAKwE,iBAAiBH,GAEfA,CACT,CAEAE,kBAAkBF,GAChB,OAAOA,CACT,CAEAC,gBAAgBD,EAAQrkB,GACtB,MAAMykB,EAAa,GAAUzkB,GAAWujB,GAAYQ,iBAAiB/jB,EAAS,UAAY,CAAC,EAE3F,MAAO,IAAKggB,KAAK0E,YAAYT,WACD,iBAAfQ,EAA0BA,EAAa,CAAC,KAC/C,GAAUzkB,GAAWujB,GAAYG,kBAAkB1jB,GAAW,CAAC,KAC7C,iBAAXqkB,EAAsBA,EAAS,CAAC,EAE/C,CAEAG,iBAAiBH,EAAQM,EAAc3E,KAAK0E,YAAYR,aACtD,IAAK,MAAM3hB,KAAY9E,OAAO4D,KAAKsjB,GAAc,CAC/C,MAAMC,EAAgBD,EAAYpiB,GAC5BnE,EAAQimB,EAAO9hB,GACfsiB,EAAY,GAAUzmB,GAAS,UA1uBrCsc,OADSA,EA2uB+Ctc,GAzuBnD,GAAGsc,IAGLjd,OAAOM,UAAUuC,SAASrC,KAAKyc,GAAQoK,MAAM,eAAe,GAAG5kB,cAwuBlE,IAAK,IAAI6kB,OAAOH,GAAe9gB,KAAK+gB,GAClC,MAAM,IAAIG,UAAU,GAAGhF,KAAK0E,YAAYjI,KAAKwI,0BAA0B1iB,qBAA4BsiB,yBAAiCD,MAExI,CAhvBWlK,KAivBb,EAmBF,MAAMwK,WAAsBlB,GAC1BU,YAAY1kB,EAASqkB,GACnBc,SACAnlB,EAAU6a,GAAW7a,MAMrBggB,KAAKoF,SAAWplB,EAChBggB,KAAKqF,QAAUrF,KAAKoE,WAAWC,GAC/B5B,GAAKjQ,IAAIwN,KAAKoF,SAAUpF,KAAK0E,YAAYY,SAAUtF,MACrD,CAGAuF,UACE9C,GAAKM,OAAO/C,KAAKoF,SAAUpF,KAAK0E,YAAYY,UAC5C/E,GAAaC,IAAIR,KAAKoF,SAAUpF,KAAK0E,YAAYc,WAEjD,IAAK,MAAMC,KAAgBhoB,OAAOioB,oBAAoB1F,MACpDA,KAAKyF,GAAgB,IAEzB,CAEAE,eAAepJ,EAAUvc,EAAS4lB,GAAa,GAC7C5I,GAAuBT,EAAUvc,EAAS4lB,EAC5C,CAEAxB,WAAWC,GAMT,OALAA,EAASrE,KAAKsE,gBAAgBD,EAAQrE,KAAKoF,UAC3Cf,EAASrE,KAAKuE,kBAAkBF,GAEhCrE,KAAKwE,iBAAiBH,GAEfA,CACT,CAGAwB,mBAAmB7lB,GACjB,OAAOyiB,GAAK7kB,IAAIid,GAAW7a,GAAUggB,KAAKsF,SAC5C,CAEAO,2BAA2B7lB,EAASqkB,EAAS,CAAC,GAC5C,OAAOrE,KAAK8F,YAAY9lB,IAAY,IAAIggB,KAAKhgB,EAA2B,iBAAXqkB,EAAsBA,EAAS,KAC9F,CAEW0B,qBACT,MApDY,OAqDd,CAEWT,sBACT,MAAO,MAAMtF,KAAKvD,MACpB,CAEW+I,uBACT,MAAO,IAAIxF,KAAKsF,UAClB,CAEAO,iBAAiB9kB,GACf,MAAO,GAAGA,IAAOif,KAAKwF,WACxB,EAWF,MAAMQ,GAAuB,CAACC,EAAWC,EAAS,UAChD,MAAMC,EAAa,gBAAgBF,EAAUT,YACvCzkB,EAAOklB,EAAUxJ,KACvB8D,GAAaY,GAAGrb,SAAUqgB,EAAY,qBAAqBplB,OAAU,SAAUqe,GAK7E,GAJI,CAAC,IAAK,QAAQlF,SAAS8F,KAAKoG,UAC9BhH,EAAM+C,iBAGJ9G,GAAW2E,MACb,OAGF,MAAMhT,EAASsN,GAAuB0F,OAASA,KAAK7E,QAAQ,IAAIpa,KAC/CklB,EAAUI,oBAAoBrZ,GAEtCkZ,IACX,GAAE,EAeEI,GAAc,YACdC,GAAc,QAAQD,KACtBE,GAAe,SAASF,KAO9B,MAAMG,WAAcvB,GAEPzI,kBACT,MAdW,OAeb,CAGAiK,QAGE,GAFmBnG,GAAakB,QAAQzB,KAAKoF,SAAUmB,IAExC1E,iBACb,OAGF7B,KAAKoF,SAAS5J,UAAUuH,OAnBF,QAqBtB,MAAM6C,EAAa5F,KAAKoF,SAAS5J,UAAUvW,SAtBrB,QAwBtB+a,KAAK2F,gBAAe,IAAM3F,KAAK2G,mBAAmB3G,KAAKoF,SAAUQ,EACnE,CAGAe,kBACE3G,KAAKoF,SAASrC,SAEdxC,GAAakB,QAAQzB,KAAKoF,SAAUoB,IACpCxG,KAAKuF,SACP,CAGAM,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAO2b,GAAMJ,oBAAoBrG,MAEvC,GAAsB,iBAAXqE,EAAX,CAIA,QAAqB7K,IAAjB1O,EAAKuZ,IAAyBA,EAAOlK,WAAW,MAAmB,gBAAXkK,EAC1D,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,GAAQrE,KANb,CAOF,GACF,EAQFgG,GAAqBS,GAAO,SAK5BpK,GAAmBoK,IAYnB,MAKMI,GAAyB,4BAM/B,MAAMC,WAAe5B,GAERzI,kBACT,MAdW,QAeb,CAGAsK,SAEE/G,KAAKoF,SAASvjB,aAAa,eAAgBme,KAAKoF,SAAS5J,UAAUuL,OAhB3C,UAiB1B,CAGAlB,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAOgc,GAAOT,oBAAoBrG,MAEzB,WAAXqE,GACFvZ,EAAKuZ,IAET,GACF,EAQF9D,GAAaY,GAAGrb,SAlCe,2BAkCmB+gB,IAAwBzH,IACxEA,EAAM+C,iBACN,MAAM6E,EAAS5H,EAAMpS,OAAOmO,QAAQ0L,IACvBC,GAAOT,oBAAoBW,GACnCD,QAAQ,IAMf1K,GAAmByK,IAYnB,MAAMG,GAAiB,CACrBrU,KAAI,CAACmH,EAAU/Z,EAAU8F,SAASC,kBACzB,GAAG3G,UAAUsB,QAAQ3C,UAAUqiB,iBAAiBniB,KAAK+B,EAAS+Z,IAGvEmN,QAAO,CAACnN,EAAU/Z,EAAU8F,SAASC,kBAC5BrF,QAAQ3C,UAAU8K,cAAc5K,KAAK+B,EAAS+Z,GAGvDoN,SAAQ,CAACnnB,EAAS+Z,IACT,GAAG3a,UAAUY,EAAQmnB,UAAUvgB,QAAOzB,GAASA,EAAMiiB,QAAQrN,KAGtEsN,QAAQrnB,EAAS+Z,GACf,MAAMsN,EAAU,GAChB,IAAIC,EAAWtnB,EAAQwF,WAAW2V,QAAQpB,GAE1C,KAAOuN,
GACLD,EAAQhV,KAAKiV,GACbA,EAAWA,EAAS9hB,WAAW2V,QAAQpB,GAGzC,OAAOsN,CACT,EAEAE,KAAKvnB,EAAS+Z,GACZ,IAAIyN,EAAWxnB,EAAQynB,uBAEvB,KAAOD,GAAU,CACf,GAAIA,EAASJ,QAAQrN,GACnB,MAAO,CAACyN,GAGVA,EAAWA,EAASC,sBACtB,CAEA,MAAO,EACT,EAGAniB,KAAKtF,EAAS+Z,GACZ,IAAIzU,EAAOtF,EAAQ0nB,mBAEnB,KAAOpiB,GAAM,CACX,GAAIA,EAAK8hB,QAAQrN,GACf,MAAO,CAACzU,GAGVA,EAAOA,EAAKoiB,kBACd,CAEA,MAAO,EACT,EAEAC,kBAAkB3nB,GAChB,MAAM4nB,EAAa,CAAC,IAAK,SAAU,QAAS,WAAY,SAAU,UAAW,aAAc,4BAA4BrkB,KAAIwW,GAAY,GAAGA,2BAAiCpW,KAAK,KAChL,OAAOqc,KAAKpN,KAAKgV,EAAY5nB,GAAS4G,QAAOihB,IAAOxM,GAAWwM,IAAO/M,GAAU+M,IAClF,GAeIC,GAAc,YACdC,GAAmB,aAAaD,KAChCE,GAAkB,YAAYF,KAC9BG,GAAiB,WAAWH,KAC5BI,GAAoB,cAAcJ,KAClCK,GAAkB,YAAYL,KAK9BM,GAAY,CAChBC,YAAa,KACbC,aAAc,KACdC,cAAe,MAEXC,GAAgB,CACpBH,YAAa,kBACbC,aAAc,kBACdC,cAAe,mBAMjB,MAAME,WAAczE,GAClBU,YAAY1kB,EAASqkB,GACnBc,QACAnF,KAAKoF,SAAWplB,EAEXA,GAAYyoB,GAAMC,gBAIvB1I,KAAKqF,QAAUrF,KAAKoE,WAAWC,GAC/BrE,KAAK2I,QAAU,EACf3I,KAAK4I,sBAAwB9H,QAAQzgB,OAAOwoB,cAE5C7I,KAAK8I,cACP,CAGW7E,qBACT,OAAOmE,EACT,CAEWlE,yBACT,OAAOsE,EACT,CAEW/L,kBACT,MAnDW,OAoDb,CAGA8I,UACEhF,GAAaC,IAAIR,KAAKoF,SAAU0C,GAClC,CAGAiB,OAAO3J,GACAY,KAAK4I,sBAKN5I,KAAKgJ,wBAAwB5J,KAC/BY,KAAK2I,QAAUvJ,EAAM6J,SALrBjJ,KAAK2I,QAAUvJ,EAAM8J,QAAQ,GAAGD,OAOpC,CAEAE,KAAK/J,GACCY,KAAKgJ,wBAAwB5J,KAC/BY,KAAK2I,QAAUvJ,EAAM6J,QAAUjJ,KAAK2I,SAGtC3I,KAAKoJ,eAELrM,GAAQiD,KAAKqF,QAAQgD,YACvB,CAEAgB,MAAMjK,GACJY,KAAK2I,QAAUvJ,EAAM8J,SAAW9J,EAAM8J,QAAQ/X,OAAS,EAAI,EAAIiO,EAAM8J,QAAQ,GAAGD,QAAUjJ,KAAK2I,OACjG,CAEAS,eACE,MAAME,EAAY1mB,KAAKoC,IAAIgb,KAAK2I,SAEhC,GAAIW,GA9EgB,GA+ElB,OAGF,MAAMvb,EAAYub,EAAYtJ,KAAK2I,QACnC3I,KAAK2I,QAAU,EAEV5a,GAILgP,GAAQhP,EAAY,EAAIiS,KAAKqF,QAAQkD,cAAgBvI,KAAKqF,QAAQiD,aACpE,CAEAQ,cACM9I,KAAK4I,uBACPrI,GAAaY,GAAGnB,KAAKoF,SAAU8C,IAAmB9I,GAASY,KAAK+I,OAAO3J,KACvEmB,GAAaY,GAAGnB,KAAKoF,SAAU+C,IAAiB/I,GAASY,KAAKmJ,KAAK/J,KAEnEY,KAAKoF,SAAS5J,UAAUtE,IAlGG,mBAoG3BqJ,GAAaY,GAAGnB,KAAKoF,SAAU2C,IAAkB3I,GAASY,KAAK+I,OAAO3J,KACtEmB,GAAaY,GAAGnB,KAAKoF,SAAU4C,IAAiB5I,GAASY,KAAKqJ,MAAMjK,KACpEmB,GAAaY,GAAGnB,KAAKoF,SAAU6C,IAAgB7I,GAASY,KAAKmJ,KAAK/J,KAEtE,CAEA4J,wBAAwB5J,GACtB,OAAOY,KAAK4I,wBA5GS,QA4GiBxJ,EAAMmK,aA7GrB,UA6GyDnK,EAAMmK,YACxF,CAGA1D,qBACE,MAAO,iBAAkB/f,SAASC,iBAAmB7C,UAAUsmB,eAAiB,CAClF,EAcF,MAEMC,GAAc,eACdC,GAAiB,YAKjBC,GAAa,OACbC,GAAa,OACbC,GAAiB,OACjBC,GAAkB,QAClBC,GAAc,QAAQN,KACtBO,GAAa,OAAOP,KACpBQ,GAAkB,UAAUR,KAC5BS,GAAqB,aAAaT,KAClCU,GAAqB,aAAaV,KAClCW,GAAmB,YAAYX,KAC/BY,GAAwB,OAAOZ,KAAcC,KAC7CY,GAAyB,QAAQb,KAAcC,KAC/Ca,GAAsB,WACtBC,GAAsB,SAMtBC,GAAkB,UAClBC,GAAgB,iBAChBC,GAAuBF,GAAkBC,GAKzCE,GAAmB,CACvB,UAAoBd,GACpB,WAAqBD,IAEjBgB,GAAY,CAChBC,SAAU,IACVC,UAAU,EACVC,MAAO,QACPC,MAAM,EACNC,OAAO,EACPC,MAAM,GAEFC,GAAgB,CACpBN,SAAU,mBAEVC,SAAU,UACVC,MAAO,mBACPC,KAAM,mBACNC,MAAO,UACPC,KAAM,WAMR,MAAME,WAAiBnG,GACrBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GACfrE,KAAKsL,UAAY,KACjBtL,KAAKuL,eAAiB,KACtBvL,KAAKwL,YAAa,EAClBxL,KAAKyL,aAAe,KACpBzL,KAAK0L,aAAe,KACpB1L,KAAK2L,mBAAqB1E,GAAeC,QApCjB,uBAoC8ClH,KAAKoF,UAE3EpF,KAAK4L,qBAED5L,KAAKqF,QAAQ4F,OAASV,IACxBvK,KAAK6L,OAET,CAGW5H,qBACT,OAAO4G,EACT,CAEW3G,yBACT,OAAOkH,EACT,CAEW3O,kBACT,MAtFW,UAuFb,CAGAnX,OACE0a,KAAK8L,OAAOnC,GACd,CAEAoC,mBAIOjmB,SAASkmB,QAAUlR,GAAUkF,KAAKoF,WACrCpF,KAAK1a,MAET,CAEAiiB,OACEvH,KAAK8L,OAAOlC,GACd,CAEAoB,QACMhL,KAAKwL,YACPjR,GAAqByF,KAAKoF,UAG5BpF,KAAKiM,gBACP,CAEAJ,QACE7L,KAAKiM,iBAELjM,KAAKkM,kBAELlM,KAAKsL,UAAYa,aAAY,IAAMnM,KAAK+L,mBAAmB/L,KAAKqF,QAAQyF,SAC1E,CAEAsB,oBACOpM,KAAKqF,QAAQ4F,OAIdjL,KAAKwL,WACPjL,GAAaa,IAAIpB,KAAKoF,SAAU4E,IAAY,IAAMhK,KAAK6L,UAIzD7L,KAAK6L,QACP,CAEAQ,GAAGnT,GACD,MAAMoT,EAAQtM,KAAKuM,YAEnB,GAAIrT,EAAQoT,EAAMnb,OAAS,GA
AK+H,EAAQ,EACtC,OAGF,GAAI8G,KAAKwL,WAEP,YADAjL,GAAaa,IAAIpB,KAAKoF,SAAU4E,IAAY,IAAMhK,KAAKqM,GAAGnT,KAI5D,MAAMsT,EAAcxM,KAAKyM,cAAczM,KAAK0M,cAE5C,GAAIF,IAAgBtT,EAClB,OAGF,MAAMtC,EAAQsC,EAAQsT,EAAc7C,GAAaC,GAEjD5J,KAAK8L,OAAOlV,EAAO0V,EAAMpT,GAC3B,CAEAqM,UACMvF,KAAK0L,cACP1L,KAAK0L,aAAanG,UAGpBJ,MAAMI,SACR,CAGAhB,kBAAkBF,GAEhB,OADAA,EAAOsI,gBAAkBtI,EAAOyG,SACzBzG,CACT,CAEAuH,qBACM5L,KAAKqF,QAAQ0F,UACfxK,GAAaY,GAAGnB,KAAKoF,SAAU6E,IAAiB7K,GAASY,KAAK4M,SAASxN,KAG9C,UAAvBY,KAAKqF,QAAQ2F,QACfzK,GAAaY,GAAGnB,KAAKoF,SAAU8E,IAAoB,IAAMlK,KAAKgL,UAC9DzK,GAAaY,GAAGnB,KAAKoF,SAAU+E,IAAoB,IAAMnK,KAAKoM,uBAG5DpM,KAAKqF,QAAQ6F,OAASzC,GAAMC,eAC9B1I,KAAK6M,yBAET,CAEAA,0BACE,IAAK,MAAMC,KAAO7F,GAAerU,KA/JX,qBA+JmCoN,KAAKoF,UAC5D7E,GAAaY,GAAG2L,EAAK1C,IAAkBhL,GAASA,EAAM+C,mBAGxD,MAqBM4K,EAAc,CAClBzE,aAAc,IAAMtI,KAAK8L,OAAO9L,KAAKgN,kBAAkBnD,KACvDtB,cAAe,IAAMvI,KAAK8L,OAAO9L,KAAKgN,kBAAkBlD,KACxDzB,YAxBkB,KACS,UAAvBrI,KAAKqF,QAAQ2F,QAWjBhL,KAAKgL,QAEDhL,KAAKyL,cACPwB,aAAajN,KAAKyL,cAGpBzL,KAAKyL,aAAe5N,YAAW,IAAMmC,KAAKoM,qBA7MjB,IA6M+DpM,KAAKqF,QAAQyF,UAAS,GAQhH9K,KAAK0L,aAAe,IAAIjD,GAAMzI,KAAKoF,SAAU2H,EAC/C,CAEAH,SAASxN,GACP,GAAI,kBAAkBtb,KAAKsb,EAAMpS,OAAOoZ,SACtC,OAGF,MAAMrY,EAAY6c,GAAiBxL,EAAM7hB,KAErCwQ,IACFqR,EAAM+C,iBAENnC,KAAK8L,OAAO9L,KAAKgN,kBAAkBjf,IAEvC,CAEA0e,cAAczsB,GACZ,OAAOggB,KAAKuM,YAAY3mB,QAAQ5F,EAClC,CAEAktB,2BAA2BhU,GACzB,IAAK8G,KAAK2L,mBACR,OAGF,MAAMwB,EAAkBlG,GAAeC,QAAQuD,GAAiBzK,KAAK2L,oBACrEwB,EAAgB3R,UAAUuH,OAAOyH,IACjC2C,EAAgBvrB,gBAAgB,gBAChC,MAAMwrB,EAAqBnG,GAAeC,QAAQ,sBAAsBhO,MAAW8G,KAAK2L,oBAEpFyB,IACFA,EAAmB5R,UAAUtE,IAAIsT,IACjC4C,EAAmBvrB,aAAa,eAAgB,QAEpD,CAEAqqB,kBACE,MAAMlsB,EAAUggB,KAAKuL,gBAAkBvL,KAAK0M,aAE5C,IAAK1sB,EACH,OAGF,MAAMqtB,EAAkB9P,OAAO+P,SAASttB,EAAQga,aAAa,oBAAqB,IAClFgG,KAAKqF,QAAQyF,SAAWuC,GAAmBrN,KAAKqF,QAAQsH,eAC1D,CAEAb,OAAOlV,EAAO5W,EAAU,MACtB,GAAIggB,KAAKwL,WACP,OAGF,MAAMzN,EAAgBiC,KAAK0M,aAErBa,EAAS3W,IAAU+S,GACnB6D,EAAcxtB,GAAW8d,GAAqBkC,KAAKuM,YAAaxO,EAAewP,EAAQvN,KAAKqF,QAAQ8F,MAE1G,GAAIqC,IAAgBzP,EAClB,OAGF,MAAM0P,EAAmBzN,KAAKyM,cAAce,GAEtCE,EAAeC,GACZpN,GAAakB,QAAQzB,KAAKoF,SAAUuI,EAAW,CACpD7N,cAAe0N,EACfzf,UAAWiS,KAAK4N,kBAAkBhX,GAClCkM,KAAM9C,KAAKyM,cAAc1O,GACzBsO,GAAIoB,IAMR,GAFmBC,EAAa3D,IAEjBlI,iBACb,OAGF,IAAK9D,IAAkByP,EAGrB,OAGF,MAAMK,EAAY/M,QAAQd,KAAKsL,WAC/BtL,KAAKgL,QACLhL,KAAKwL,YAAa,EAElBxL,KAAKkN,2BAA2BO,GAEhCzN,KAAKuL,eAAiBiC,EACtB,MAAMM,EAAuBP,EA/RR,sBADF,oBAiSbQ,EAAiBR,EA/RH,qBACA,qBA+RpBC,EAAYhS,UAAUtE,IAAI6W,GAC1BhS,GAAOyR,GACPzP,EAAcvC,UAAUtE,IAAI4W,GAC5BN,EAAYhS,UAAUtE,IAAI4W,GAU1B9N,KAAK2F,gBARoB,KACvB6H,EAAYhS,UAAUuH,OAAO+K,EAAsBC,GACnDP,EAAYhS,UAAUtE,IAAIsT,IAC1BzM,EAAcvC,UAAUuH,OAAOyH,GAAqBuD,EAAgBD,GACpE9N,KAAKwL,YAAa,EAClBkC,EAAa1D,GAAW,GAGYjM,EAAeiC,KAAKgO,eAEtDH,GACF7N,KAAK6L,OAET,CAEAmC,cACE,OAAOhO,KAAKoF,SAAS5J,UAAUvW,SAxTV,QAyTvB,CAEAynB,aACE,OAAOzF,GAAeC,QAAQyD,GAAsB3K,KAAKoF,SAC3D,CAEAmH,YACE,OAAOtF,GAAerU,KAAK8X,GAAe1K,KAAKoF,SACjD,CAEA6G,iBACMjM,KAAKsL,YACP2C,cAAcjO,KAAKsL,WACnBtL,KAAKsL,UAAY,KAErB,CAEA0B,kBAAkBjf,GAChB,OAAIoO,KACKpO,IAAc8b,GAAiBD,GAAaD,GAG9C5b,IAAc8b,GAAiBF,GAAaC,EACrD,CAEAgE,kBAAkBhX,GAChB,OAAIuF,KACKvF,IAAUgT,GAAaC,GAAiBC,GAG1ClT,IAAUgT,GAAaE,GAAkBD,EAClD,CAGAhE,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAOugB,GAAShF,oBAAoBrG,KAAMqE,GAEhD,GAAsB,iBAAXA,GAKX,GAAsB,iBAAXA,EAAqB,CAC9B,QAAqB7K,IAAjB1O,EAAKuZ,IAAyBA,EAAOlK,WAAW,MAAmB,gBAAXkK,EAC1D,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IACP,OAVEvZ,EAAKuhB,GAAGhI,EAWZ,GACF,EAQF9D,GAAaY,GAAGrb,SAAUwkB,GA1WE,uCA0W2C,SAAUlL,GAC/E,MAAMpS,EAASsN,GAAuB0F,MAEtC,IAAKhT,IAAWA,EAAOwO,UAAUvW,SAASslB,IACxC,OAGFnL,EAAM+C,iBACN,MAAM+L,EAAW7C,GAAShF,oBAAoBrZ,
GACxCmhB,EAAanO,KAAKhG,aAAa,oBAErC,OAAImU,GACFD,EAAS7B,GAAG8B,QAEZD,EAAS9B,qBAKyC,SAAhD7I,GAAYQ,iBAAiB/D,KAAM,UACrCkO,EAAS5oB,YAET4oB,EAAS9B,sBAKX8B,EAAS3G,YAET2G,EAAS9B,oBACX,IACA7L,GAAaY,GAAG9gB,OAAQgqB,IAAuB,KAC7C,MAAM+D,EAAYnH,GAAerU,KAzYR,6BA2YzB,IAAK,MAAMsb,KAAYE,EACrB/C,GAAShF,oBAAoB6H,EAC/B,IAMF7R,GAAmBgP,IAYnB,MAEMgD,GAAc,eAEdC,GAAe,OAAOD,KACtBE,GAAgB,QAAQF,KACxBG,GAAe,OAAOH,KACtBI,GAAiB,SAASJ,KAC1BK,GAAyB,QAAQL,cACjCM,GAAoB,OACpBC,GAAsB,WACtBC,GAAwB,aAExBC,GAA6B,WAAWF,OAAwBA,KAKhEG,GAAyB,8BACzBC,GAAY,CAChB9pB,OAAQ,KACR6hB,QAAQ,GAEJkI,GAAgB,CACpB/pB,OAAQ,iBACR6hB,OAAQ,WAMV,MAAMmI,WAAiBhK,GACrBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GACfrE,KAAKmP,kBAAmB,EACxBnP,KAAKoP,cAAgB,GACrB,MAAMC,EAAapI,GAAerU,KAAKmc,IAEvC,IAAK,MAAMO,KAAQD,EAAY,CAC7B,MAAMtV,EAAWM,GAAuBiV,GAClCC,EAAgBtI,GAAerU,KAAKmH,GAAUnT,QAAO4oB,GAAgBA,IAAiBxP,KAAKoF,WAEhF,OAAbrL,GAAqBwV,EAAcpe,QACrC6O,KAAKoP,cAAc/c,KAAKid,EAE5B,CAEAtP,KAAKyP,sBAEAzP,KAAKqF,QAAQngB,QAChB8a,KAAK0P,0BAA0B1P,KAAKoP,cAAepP,KAAK2P,YAGtD3P,KAAKqF,QAAQ0B,QACf/G,KAAK+G,QAET,CAGW9C,qBACT,OAAO+K,EACT,CAEW9K,yBACT,OAAO+K,EACT,CAEWxS,kBACT,MApEW,UAqEb,CAGAsK,SACM/G,KAAK2P,WACP3P,KAAK4P,OAEL5P,KAAK6P,MAET,CAEAA,OACE,GAAI7P,KAAKmP,kBAAoBnP,KAAK2P,WAChC,OAGF,IAAIG,EAAiB,GAQrB,GANI9P,KAAKqF,QAAQngB,SACf4qB,EAAiB9P,KAAK+P,uBAvEH,wCAuE4CnpB,QAAO5G,GAAWA,IAAYggB,KAAKoF,WAAU7hB,KAAIvD,GAAWkvB,GAAS7I,oBAAoBrmB,EAAS,CAC/J+mB,QAAQ,OAIR+I,EAAe3e,QAAU2e,EAAe,GAAGX,iBAC7C,OAKF,GAFmB5O,GAAakB,QAAQzB,KAAKoF,SAAUkJ,IAExCzM,iBACb,OAGF,IAAK,MAAMmO,KAAkBF,EAC3BE,EAAeJ,OAGjB,MAAMK,EAAYjQ,KAAKkQ,gBAEvBlQ,KAAKoF,SAAS5J,UAAUuH,OAAO6L,IAE/B5O,KAAKoF,SAAS5J,UAAUtE,IAAI2X,IAE5B7O,KAAKoF,SAAS5jB,MAAMyuB,GAAa,EAEjCjQ,KAAK0P,0BAA0B1P,KAAKoP,eAAe,GAEnDpP,KAAKmP,kBAAmB,EAExB,MAYMgB,EAAa,SADUF,EAAU,GAAGhL,cAAgBgL,EAAUpd,MAAM,KAG1EmN,KAAK2F,gBAdY,KACf3F,KAAKmP,kBAAmB,EAExBnP,KAAKoF,SAAS5J,UAAUuH,OAAO8L,IAE/B7O,KAAKoF,SAAS5J,UAAUtE,IAAI0X,GAAqBD,IAEjD3O,KAAKoF,SAAS5jB,MAAMyuB,GAAa,GACjC1P,GAAakB,QAAQzB,KAAKoF,SAAUmJ,GAAc,GAMtBvO,KAAKoF,UAAU,GAE7CpF,KAAKoF,SAAS5jB,MAAMyuB,GAAa,GAAGjQ,KAAKoF,SAAS+K,MACpD,CAEAP,OACE,GAAI5P,KAAKmP,mBAAqBnP,KAAK2P,WACjC,OAKF,GAFmBpP,GAAakB,QAAQzB,KAAKoF,SAAUoJ,IAExC3M,iBACb,OAGF,MAAMoO,EAAYjQ,KAAKkQ,gBAEvBlQ,KAAKoF,SAAS5jB,MAAMyuB,GAAa,GAAGjQ,KAAKoF,SAASrhB,wBAAwBksB,OAC1ElU,GAAOiE,KAAKoF,UAEZpF,KAAKoF,SAAS5J,UAAUtE,IAAI2X,IAE5B7O,KAAKoF,SAAS5J,UAAUuH,OAAO6L,GAAqBD,IAEpD,IAAK,MAAMlN,KAAWzB,KAAKoP,cAAe,CACxC,MAAMpvB,EAAUsa,GAAuBmH,GAEnCzhB,IAAYggB,KAAK2P,SAAS3vB,IAC5BggB,KAAK0P,0BAA0B,CAACjO,IAAU,EAE9C,CAEAzB,KAAKmP,kBAAmB,EAYxBnP,KAAKoF,SAAS5jB,MAAMyuB,GAAa,GAEjCjQ,KAAK2F,gBAZY,KACf3F,KAAKmP,kBAAmB,EAExBnP,KAAKoF,SAAS5J,UAAUuH,OAAO8L,IAE/B7O,KAAKoF,SAAS5J,UAAUtE,IAAI0X,IAE5BrO,GAAakB,QAAQzB,KAAKoF,SAAUqJ,GAAe,GAKvBzO,KAAKoF,UAAU,EAC/C,CAEAuK,SAAS3vB,EAAUggB,KAAKoF,UACtB,OAAOplB,EAAQwb,UAAUvW,SAAS0pB,GACpC,CAGApK,kBAAkBF,GAIhB,OAHAA,EAAO0C,OAASjG,QAAQuD,EAAO0C,QAE/B1C,EAAOnf,OAAS2V,GAAWwJ,EAAOnf,QAC3Bmf,CACT,CAEA6L,gBACE,OAAOlQ,KAAKoF,SAAS5J,UAAUvW,SAtLL,uBAChB,QACC,QAqLb,CAEAwqB,sBACE,IAAKzP,KAAKqF,QAAQngB,OAChB,OAGF,MAAMiiB,EAAWnH,KAAK+P,uBAAuBhB,IAE7C,IAAK,MAAM/uB,KAAWmnB,EAAU,CAC9B,MAAMiJ,EAAW9V,GAAuBta,GAEpCowB,GACFpQ,KAAK0P,0BAA0B,CAAC1vB,GAAUggB,KAAK2P,SAASS,GAE5D,CACF,CAEAL,uBAAuBhW,GACrB,MAAMoN,EAAWF,GAAerU,KAAKkc,GAA4B9O,KAAKqF,QAAQngB,QAE9E,OAAO+hB,GAAerU,KAAKmH,EAAUiG,KAAKqF,QAAQngB,QAAQ0B,QAAO5G,IAAYmnB,EAASjN,SAASla,IACjG,CAEA0vB,0BAA0BW,EAAcC,GACtC,GAAKD,EAAalf,OAIlB,IAAK,MAAMnR,KAAWqwB,EACpBrwB,EAAQwb,UAAUuL,OAvNK,aAuNyBuJ,GAChDtwB,EAAQ6B,aAAa,gBAAiByuB,EAE1C,CAGAzK,uBAAuBxB,GACrB,MAAMgB,EAAU,CAAC,EAMjB,MAJsB,iBAAXhB,GAAuB,YAAYvgB,KAAKugB,KACj
DgB,EAAQ0B,QAAS,GAGZ/G,KAAK4G,MAAK,WACf,MAAM9b,EAAOokB,GAAS7I,oBAAoBrG,KAAMqF,GAEhD,GAAsB,iBAAXhB,EAAqB,CAC9B,QAA4B,IAAjBvZ,EAAKuZ,GACd,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IACP,CACF,GACF,EAQF9D,GAAaY,GAAGrb,SAAU4oB,GAAwBK,IAAwB,SAAU3P,IAErD,MAAzBA,EAAMpS,OAAOoZ,SAAmBhH,EAAMW,gBAAmD,MAAjCX,EAAMW,eAAeqG,UAC/EhH,EAAM+C,iBAGR,MAAMpI,EAAWM,GAAuB2F,MAClCuQ,EAAmBtJ,GAAerU,KAAKmH,GAE7C,IAAK,MAAM/Z,KAAWuwB,EACpBrB,GAAS7I,oBAAoBrmB,EAAS,CACpC+mB,QAAQ,IACPA,QAEP,IAKA1K,GAAmB6S,IAYnB,MAAMsB,GAAS,WAETC,GAAc,eACdC,GAAiB,YAGjBC,GAAiB,UACjBC,GAAmB,YAGnBC,GAAe,OAAOJ,KACtBK,GAAiB,SAASL,KAC1BM,GAAe,OAAON,KACtBO,GAAgB,QAAQP,KACxBQ,GAAyB,QAAQR,KAAcC,KAC/CQ,GAAyB,UAAUT,KAAcC,KACjDS,GAAuB,QAAQV,KAAcC,KAC7CU,GAAoB,OAMpBC,GAAyB,4DACzBC,GAA6B,GAAGD,MAA0BD,KAC1DG,GAAgB,iBAIhBC,GAAgBrV,KAAU,UAAY,YACtCsV,GAAmBtV,KAAU,YAAc,UAC3CuV,GAAmBvV,KAAU,aAAe,eAC5CwV,GAAsBxV,KAAU,eAAiB,aACjDyV,GAAkBzV,KAAU,aAAe,cAC3C0V,GAAiB1V,KAAU,cAAgB,aAG3C2V,GAAY,CAChBC,WAAW,EACXrjB,SAAU,kBACVsjB,QAAS,UACTvpB,OAAQ,CAAC,EAAG,GACZwpB,aAAc,KACdlzB,UAAW,UAEPmzB,GAAgB,CACpBH,UAAW,mBACXrjB,SAAU,mBACVsjB,QAAS,SACTvpB,OAAQ,0BACRwpB,aAAc,yBACdlzB,UAAW,2BAMb,MAAMozB,WAAiBjN,GACrBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GACfrE,KAAKoS,QAAU,KACfpS,KAAKqS,QAAUrS,KAAKoF,SAAS5f,WAG7Bwa,KAAKsS,MAAQrL,GAAe3hB,KAAK0a,KAAKoF,SAAUmM,IAAe,IAAMtK,GAAeM,KAAKvH,KAAKoF,SAAUmM,IAAe,IAAMtK,GAAeC,QAAQqK,GAAevR,KAAKqS,SACxKrS,KAAKuS,UAAYvS,KAAKwS,eACxB,CAGWvO,qBACT,OAAO6N,EACT,CAEW5N,yBACT,OAAOgO,EACT,CAEWzV,kBACT,OAAO+T,EACT,CAGAzJ,SACE,OAAO/G,KAAK2P,WAAa3P,KAAK4P,OAAS5P,KAAK6P,MAC9C,CAEAA,OACE,GAAIxU,GAAW2E,KAAKoF,WAAapF,KAAK2P,WACpC,OAGF,MAAM7P,EAAgB,CACpBA,cAAeE,KAAKoF,UAItB,IAFkB7E,GAAakB,QAAQzB,KAAKoF,SAAU2L,GAAcjR,GAEtD+B,iBAAd,CAUA,GANA7B,KAAKyS,gBAMD,iBAAkB3sB,SAASC,kBAAoBia,KAAKqS,QAAQlX,QA/ExC,eAgFtB,IAAK,MAAMnb,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAKwa,UAC/C5G,GAAaY,GAAGnhB,EAAS,YAAa8b,IAI1CkE,KAAKoF,SAASsN,QAEd1S,KAAKoF,SAASvjB,aAAa,iBAAiB,GAE5Cme,KAAKsS,MAAM9W,UAAUtE,IAAIka,IAEzBpR,KAAKoF,SAAS5J,UAAUtE,IAAIka,IAE5B7Q,GAAakB,QAAQzB,KAAKoF,SAAU4L,GAAelR,EAtBnD,CAuBF,CAEA8P,OACE,GAAIvU,GAAW2E,KAAKoF,YAAcpF,KAAK2P,WACrC,OAGF,MAAM7P,EAAgB,CACpBA,cAAeE,KAAKoF,UAGtBpF,KAAK2S,cAAc7S,EACrB,CAEAyF,UACMvF,KAAKoS,SACPpS,KAAKoS,QAAQ3Y,UAGf0L,MAAMI,SACR,CAEA/Z,SACEwU,KAAKuS,UAAYvS,KAAKwS,gBAElBxS,KAAKoS,SACPpS,KAAKoS,QAAQ5mB,QAEjB,CAGAmnB,cAAc7S,GAGZ,IAFkBS,GAAakB,QAAQzB,KAAKoF,SAAUyL,GAAc/Q,GAEtD+B,iBAAd,CAMA,GAAI,iBAAkB/b,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAKwa,UAC/C5G,GAAaC,IAAIxgB,EAAS,YAAa8b,IAIvCkE,KAAKoS,SACPpS,KAAKoS,QAAQ3Y,UAGfuG,KAAKsS,MAAM9W,UAAUuH,OAAOqO,IAE5BpR,KAAKoF,SAAS5J,UAAUuH,OAAOqO,IAE/BpR,KAAKoF,SAASvjB,aAAa,gBAAiB,SAE5C0hB,GAAYE,oBAAoBzD,KAAKsS,MAAO,UAC5C/R,GAAakB,QAAQzB,KAAKoF,SAAU0L,GAAgBhR,EArBpD,CAsBF,CAEAsE,WAAWC,GAGT,GAAgC,iBAFhCA,EAASc,MAAMf,WAAWC,IAERtlB,YAA2B,GAAUslB,EAAOtlB,YAAgE,mBAA3CslB,EAAOtlB,UAAUgF,sBAElG,MAAM,IAAIihB,UAAU,GAAGwL,GAAOvL,+GAGhC,OAAOZ,CACT,CAEAoO,gBACE,QAAsB,IAAX,EACT,MAAM,IAAIzN,UAAU,gEAGtB,IAAI4N,EAAmB5S,KAAKoF,SAEG,WAA3BpF,KAAKqF,QAAQtmB,UACf6zB,EAAmB5S,KAAKqS,QACf,GAAUrS,KAAKqF,QAAQtmB,WAChC6zB,EAAmB/X,GAAWmF,KAAKqF,QAAQtmB,WACA,iBAA3BihB,KAAKqF,QAAQtmB,YAC7B6zB,EAAmB5S,KAAKqF,QAAQtmB,WAGlC,MAAMkzB,EAAejS,KAAK6S,mBAE1B7S,KAAKoS,QAAU,GAAoBQ,EAAkB5S,KAAKsS,MAAOL,EACnE,CAEAtC,WACE,OAAO3P,KAAKsS,MAAM9W,UAAUvW,SAASmsB,GACvC,CAEA0B,gBACE,MAAMC,EAAiB/S,KAAKqS,QAE5B,GAAIU,EAAevX,UAAUvW,SAxMN,WAyMrB,OAAO2sB,GAGT,GAAImB,EAAevX,UAAUvW,SA3MJ,aA4MvB,OAAO4sB,GAGT,GAAIkB,EAAevX,UAAUvW,SA9MA,iBA+M3B,MAjMsB,MAoMxB,GAAI8tB,EAAevX,UAAUvW,SAjNE,mBAkN7B,MApMyB,SAwM3B,MAAM+tB,EAAkF,QAA1EttB,iBAAiBsa,KAAKsS,OAAOrX,iBAAiB,iBAAiBb,
OAE7E,OAAI2Y,EAAevX,UAAUvW,SA5NP,UA6Nb+tB,EAAQvB,GAAmBD,GAG7BwB,EAAQrB,GAAsBD,EACvC,CAEAc,gBACE,OAAkD,OAA3CxS,KAAKoF,SAASjK,QA5ND,UA6NtB,CAEA8X,aACE,MAAM,OACJxqB,GACEuX,KAAKqF,QAET,MAAsB,iBAAX5c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAASmf,OAAO+P,SAASlvB,EAAO,MAGzC,mBAAXqK,EACFyqB,GAAczqB,EAAOyqB,EAAYlT,KAAKoF,UAGxC3c,CACT,CAEAoqB,mBACE,MAAMM,EAAwB,CAC5Bh0B,UAAW6gB,KAAK8S,gBAChBjc,UAAW,CAAC,CACV9V,KAAM,kBACNmB,QAAS,CACPwM,SAAUsR,KAAKqF,QAAQ3W,WAExB,CACD3N,KAAM,SACNmB,QAAS,CACPuG,OAAQuX,KAAKiT,iBAcnB,OATIjT,KAAKuS,WAAsC,WAAzBvS,KAAKqF,QAAQ2M,WACjCzO,GAAYC,iBAAiBxD,KAAKsS,MAAO,SAAU,UAEnDa,EAAsBtc,UAAY,CAAC,CACjC9V,KAAM,cACNC,SAAS,KAIN,IAAKmyB,KAC+B,mBAA9BnT,KAAKqF,QAAQ4M,aAA8BjS,KAAKqF,QAAQ4M,aAAakB,GAAyBnT,KAAKqF,QAAQ4M,aAE1H,CAEAmB,iBAAgB,IACd71B,EAAG,OACHyP,IAEA,MAAMsf,EAAQrF,GAAerU,KA/QF,8DA+Q+BoN,KAAKsS,OAAO1rB,QAAO5G,GAAW8a,GAAU9a,KAE7FssB,EAAMnb,QAMX2M,GAAqBwO,EAAOtf,EAAQzP,IAAQqzB,IAAmBtE,EAAMpS,SAASlN,IAAS0lB,OACzF,CAGA7M,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAOqnB,GAAS9L,oBAAoBrG,KAAMqE,GAEhD,GAAsB,iBAAXA,EAAX,CAIA,QAA4B,IAAjBvZ,EAAKuZ,GACd,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IANL,CAOF,GACF,CAEAwB,kBAAkBzG,GAChB,GAhUuB,IAgUnBA,EAAM4H,QAAgD,UAAf5H,EAAMqB,MAnUnC,QAmUuDrB,EAAM7hB,IACzE,OAGF,MAAM81B,EAAcpM,GAAerU,KAAK0e,IAExC,IAAK,MAAMvK,KAAUsM,EAAa,CAChC,MAAMC,EAAUnB,GAASrM,YAAYiB,GAErC,IAAKuM,IAAyC,IAA9BA,EAAQjO,QAAQ0M,UAC9B,SAGF,MAAMwB,EAAenU,EAAMmU,eACrBC,EAAeD,EAAarZ,SAASoZ,EAAQhB,OAEnD,GAAIiB,EAAarZ,SAASoZ,EAAQlO,WAA2C,WAA9BkO,EAAQjO,QAAQ0M,YAA2ByB,GAA8C,YAA9BF,EAAQjO,QAAQ0M,WAA2ByB,EACnJ,SAIF,GAAIF,EAAQhB,MAAMrtB,SAASma,EAAMpS,UAA2B,UAAfoS,EAAMqB,MAxVvC,QAwV2DrB,EAAM7hB,KAAqB,qCAAqCuG,KAAKsb,EAAMpS,OAAOoZ,UACvJ,SAGF,MAAMtG,EAAgB,CACpBA,cAAewT,EAAQlO,UAGN,UAAfhG,EAAMqB,OACRX,EAAcqG,WAAa/G,GAG7BkU,EAAQX,cAAc7S,EACxB,CACF,CAEA+F,6BAA6BzG,GAG3B,MAAMqU,EAAU,kBAAkB3vB,KAAKsb,EAAMpS,OAAOoZ,SAC9CsN,EA7WW,WA6WKtU,EAAM7hB,IACtBo2B,EAAkB,CAAChD,GAAgBC,IAAkB1W,SAASkF,EAAM7hB,KAE1E,IAAKo2B,IAAoBD,EACvB,OAGF,GAAID,IAAYC,EACd,OAGFtU,EAAM+C,iBAEN,MAAMyR,EAAkB5T,KAAKoH,QAAQiK,IAA0BrR,KAAOiH,GAAeM,KAAKvH,KAAMqR,IAAwB,IAAMpK,GAAe3hB,KAAK0a,KAAMqR,IAAwB,IAAMpK,GAAeC,QAAQmK,GAAwBjS,EAAMW,eAAeva,YACpPwF,EAAWmnB,GAAS9L,oBAAoBuN,GAE9C,GAAID,EAMF,OALAvU,EAAMyU,kBACN7oB,EAAS6kB,YAET7kB,EAASooB,gBAAgBhU,GAKvBpU,EAAS2kB,aAEXvQ,EAAMyU,kBACN7oB,EAAS4kB,OACTgE,EAAgBlB,QAEpB,EAQFnS,GAAaY,GAAGrb,SAAUorB,GAAwBG,GAAwBc,GAAS2B,uBACnFvT,GAAaY,GAAGrb,SAAUorB,GAAwBK,GAAeY,GAAS2B,uBAC1EvT,GAAaY,GAAGrb,SAAUmrB,GAAwBkB,GAAS4B,YAC3DxT,GAAaY,GAAGrb,SAAUqrB,GAAsBgB,GAAS4B,YACzDxT,GAAaY,GAAGrb,SAAUmrB,GAAwBI,IAAwB,SAAUjS,GAClFA,EAAM+C,iBACNgQ,GAAS9L,oBAAoBrG,MAAM+G,QACrC,IAKA1K,GAAmB8V,IAYnB,MAAM6B,GAAyB,oDACzBC,GAA0B,cAC1BC,GAAmB,gBACnBC,GAAkB,eAKxB,MAAMC,GACJ1P,cACE1E,KAAKoF,SAAWtf,SAAS6G,IAC3B,CAGA0nB,WAEE,MAAMC,EAAgBxuB,SAASC,gBAAgBuC,YAC/C,OAAO1F,KAAKoC,IAAI3E,OAAOk0B,WAAaD,EACtC,CAEA1E,OACE,MAAMtrB,EAAQ0b,KAAKqU,WAEnBrU,KAAKwU,mBAGLxU,KAAKyU,sBAAsBzU,KAAKoF,SAAU8O,IAAkBQ,GAAmBA,EAAkBpwB,IAGjG0b,KAAKyU,sBAAsBT,GAAwBE,IAAkBQ,GAAmBA,EAAkBpwB,IAE1G0b,KAAKyU,sBAAsBR,GAAyBE,IAAiBO,GAAmBA,EAAkBpwB,GAC5G,CAEAwO,QACEkN,KAAK2U,wBAAwB3U,KAAKoF,SAAU,YAE5CpF,KAAK2U,wBAAwB3U,KAAKoF,SAAU8O,IAE5ClU,KAAK2U,wBAAwBX,GAAwBE,IAErDlU,KAAK2U,wBAAwBV,GAAyBE,GACxD,CAEAS,gBACE,OAAO5U,KAAKqU,WAAa,CAC3B,CAGAG,mBACExU,KAAK6U,sBAAsB7U,KAAKoF,SAAU,YAE1CpF,KAAKoF,SAAS5jB,MAAM+K,SAAW,QACjC,CAEAkoB,sBAAsB1a,EAAU+a,EAAevY,GAC7C,MAAMwY,EAAiB/U,KAAKqU,WAa5BrU,KAAKgV,2BAA2Bjb,GAXH/Z,IAC3B,GAAIA,IAAYggB,KAAKoF,UAAY/kB,OAAOk0B,WAAav0B,EAAQsI,YAAcysB,EACzE,OAGF/U,KAAK6U,sBAAsB70B,EAAS80B,GAEpC,MAAMJ,EAAkBr0B,OAAOqF,iBAAiB1F,GAASib,iBAAiB6Z,GAC1E90B,EAAQ
wB,MAAMyzB,YAAYH,EAAe,GAAGvY,EAASgB,OAAOC,WAAWkX,QAAsB,GAIjG,CAEAG,sBAAsB70B,EAAS80B,GAC7B,MAAMI,EAAcl1B,EAAQwB,MAAMyZ,iBAAiB6Z,GAE/CI,GACF3R,GAAYC,iBAAiBxjB,EAAS80B,EAAeI,EAEzD,CAEAP,wBAAwB5a,EAAU+a,GAahC9U,KAAKgV,2BAA2Bjb,GAZH/Z,IAC3B,MAAM5B,EAAQmlB,GAAYQ,iBAAiB/jB,EAAS80B,GAEtC,OAAV12B,GAKJmlB,GAAYE,oBAAoBzjB,EAAS80B,GACzC90B,EAAQwB,MAAMyzB,YAAYH,EAAe12B,IALvC4B,EAAQwB,MAAM2zB,eAAeL,EAKgB,GAInD,CAEAE,2BAA2Bjb,EAAUqb,GACnC,GAAI,GAAUrb,GACZqb,EAASrb,QAIX,IAAK,MAAMsb,KAAOpO,GAAerU,KAAKmH,EAAUiG,KAAKoF,UACnDgQ,EAASC,EAEb,EAcF,MAAMC,GAAS,WAETC,GAAoB,OACpBC,GAAkB,gBAAgBF,KAClCG,GAAY,CAChBC,UAAW,iBACXC,cAAe,KACf/P,YAAY,EACZ9K,WAAW,EAEX8a,YAAa,QAGTC,GAAgB,CACpBH,UAAW,SACXC,cAAe,kBACf/P,WAAY,UACZ9K,UAAW,UACX8a,YAAa,oBAMf,MAAME,WAAiB9R,GACrBU,YAAYL,GACVc,QACAnF,KAAKqF,QAAUrF,KAAKoE,WAAWC,GAC/BrE,KAAK+V,aAAc,EACnB/V,KAAKoF,SAAW,IAClB,CAGWnB,qBACT,OAAOwR,EACT,CAEWvR,yBACT,OAAO2R,EACT,CAEWpZ,kBACT,OAAO6Y,EACT,CAGAzF,KAAKtT,GACH,IAAKyD,KAAKqF,QAAQvK,UAEhB,YADAiC,GAAQR,GAIVyD,KAAKgW,UAEL,MAAMh2B,EAAUggB,KAAKiW,cAEjBjW,KAAKqF,QAAQO,YACf7J,GAAO/b,GAGTA,EAAQwb,UAAUtE,IAAIqe,IAEtBvV,KAAKkW,mBAAkB,KACrBnZ,GAAQR,EAAS,GAErB,CAEAqT,KAAKrT,GACEyD,KAAKqF,QAAQvK,WAKlBkF,KAAKiW,cAAcza,UAAUuH,OAAOwS,IAEpCvV,KAAKkW,mBAAkB,KACrBlW,KAAKuF,UACLxI,GAAQR,EAAS,KARjBQ,GAAQR,EAUZ,CAEAgJ,UACOvF,KAAK+V,cAIVxV,GAAaC,IAAIR,KAAKoF,SAAUoQ,IAEhCxV,KAAKoF,SAASrC,SAEd/C,KAAK+V,aAAc,EACrB,CAGAE,cACE,IAAKjW,KAAKoF,SAAU,CAClB,MAAM+Q,EAAWrwB,SAASswB,cAAc,OACxCD,EAAST,UAAY1V,KAAKqF,QAAQqQ,UAE9B1V,KAAKqF,QAAQO,YACfuQ,EAAS3a,UAAUtE,IAnGD,QAsGpB8I,KAAKoF,SAAW+Q,CAClB,CAEA,OAAOnW,KAAKoF,QACd,CAEAb,kBAAkBF,GAGhB,OADAA,EAAOuR,YAAc/a,GAAWwJ,EAAOuR,aAChCvR,CACT,CAEA2R,UACE,GAAIhW,KAAK+V,YACP,OAGF,MAAM/1B,EAAUggB,KAAKiW,cAErBjW,KAAKqF,QAAQuQ,YAAYS,OAAOr2B,GAEhCugB,GAAaY,GAAGnhB,EAASw1B,IAAiB,KACxCzY,GAAQiD,KAAKqF,QAAQsQ,cAAc,IAErC3V,KAAK+V,aAAc,CACrB,CAEAG,kBAAkB3Z,GAChBS,GAAuBT,EAAUyD,KAAKiW,cAAejW,KAAKqF,QAAQO,WACpE,EAcF,MAEM0Q,GAAc,gBACdC,GAAkB,UAAUD,KAC5BE,GAAoB,cAAcF,KAGlCG,GAAmB,WACnBC,GAAY,CAChBC,WAAW,EACXC,YAAa,MAGTC,GAAgB,CACpBF,UAAW,UACXC,YAAa,WAMf,MAAME,WAAkB9S,GACtBU,YAAYL,GACVc,QACAnF,KAAKqF,QAAUrF,KAAKoE,WAAWC,GAC/BrE,KAAK+W,WAAY,EACjB/W,KAAKgX,qBAAuB,IAC9B,CAGW/S,qBACT,OAAOyS,EACT,CAEWxS,yBACT,OAAO2S,EACT,CAEWpa,kBACT,MAvCW,WAwCb,CAGAwa,WACMjX,KAAK+W,YAIL/W,KAAKqF,QAAQsR,WACf3W,KAAKqF,QAAQuR,YAAYlE,QAG3BnS,GAAaC,IAAI1a,SAAUwwB,IAE3B/V,GAAaY,GAAGrb,SAAUywB,IAAiBnX,GAASY,KAAKkX,eAAe9X,KACxEmB,GAAaY,GAAGrb,SAAU0wB,IAAmBpX,GAASY,KAAKmX,eAAe/X,KAC1EY,KAAK+W,WAAY,EACnB,CAEAK,aACOpX,KAAK+W,YAIV/W,KAAK+W,WAAY,EACjBxW,GAAaC,IAAI1a,SAAUwwB,IAC7B,CAGAY,eAAe9X,GACb,MAAM,YACJwX,GACE5W,KAAKqF,QAET,GAAIjG,EAAMpS,SAAWlH,UAAYsZ,EAAMpS,SAAW4pB,GAAeA,EAAY3xB,SAASma,EAAMpS,QAC1F,OAGF,MAAM1L,EAAW2lB,GAAeU,kBAAkBiP,GAE1B,IAApBt1B,EAAS6P,OACXylB,EAAYlE,QACH1S,KAAKgX,uBAAyBP,GACvCn1B,EAASA,EAAS6P,OAAS,GAAGuhB,QAE9BpxB,EAAS,GAAGoxB,OAEhB,CAEAyE,eAAe/X,GApFD,QAqFRA,EAAM7hB,MAIVyiB,KAAKgX,qBAAuB5X,EAAMiY,SAAWZ,GAxFzB,UAyFtB,EAcF,MAEMa,GAAc,YAGdC,GAAe,OAAOD,KACtBE,GAAyB,gBAAgBF,KACzCG,GAAiB,SAASH,KAC1BI,GAAe,OAAOJ,KACtBK,GAAgB,QAAQL,KACxBM,GAAiB,SAASN,KAC1BO,GAAsB,gBAAgBP,KACtCQ,GAA0B,oBAAoBR,KAC9CS,GAA0B,kBAAkBT,KAC5CU,GAAyB,QAAQV,cACjCW,GAAkB,aAElBC,GAAoB,OACpBC,GAAoB,eAKpBC,GAAY,CAChBjC,UAAU,EACVzD,OAAO,EACP3H,UAAU,GAENsN,GAAgB,CACpBlC,SAAU,mBACVzD,MAAO,UACP3H,SAAU,WAMZ,MAAMuN,WAAcpT,GAClBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GACfrE,KAAKuY,QAAUtR,GAAeC,QApBV,gBAoBmClH,KAAKoF,UAC5DpF,KAAKwY,UAAYxY,KAAKyY,sBACtBzY,KAAK0Y,WAAa1Y,KAAK2Y,uBACvB3Y,KAAK2P,UAAW,EAChB3P,KAAKmP,kBAAmB,EACxBnP,KAAK4Y,WAAa,IAAIxE,GAEtBpU,KAAK4L,oBACP,CAGW3H,qBACT,OAAOmU,EACT,CAEWlU
,yBACT,OAAOmU,EACT,CAEW5b,kBACT,MA5DW,OA6Db,CAGAsK,OAAOjH,GACL,OAAOE,KAAK2P,SAAW3P,KAAK4P,OAAS5P,KAAK6P,KAAK/P,EACjD,CAEA+P,KAAK/P,GACCE,KAAK2P,UAAY3P,KAAKmP,kBAIR5O,GAAakB,QAAQzB,KAAKoF,SAAUsS,GAAc,CAClE5X,kBAGY+B,mBAId7B,KAAK2P,UAAW,EAChB3P,KAAKmP,kBAAmB,EAExBnP,KAAK4Y,WAAWhJ,OAEhB9pB,SAAS6G,KAAK6O,UAAUtE,IAAI+gB,IAE5BjY,KAAK6Y,gBAEL7Y,KAAKwY,UAAU3I,MAAK,IAAM7P,KAAK8Y,aAAahZ,KAC9C,CAEA8P,OACO5P,KAAK2P,WAAY3P,KAAKmP,mBAIT5O,GAAakB,QAAQzB,KAAKoF,SAAUmS,IAExC1V,mBAId7B,KAAK2P,UAAW,EAChB3P,KAAKmP,kBAAmB,EAExBnP,KAAK0Y,WAAWtB,aAEhBpX,KAAKoF,SAAS5J,UAAUuH,OAAOmV,IAE/BlY,KAAK2F,gBAAe,IAAM3F,KAAK+Y,cAAc/Y,KAAKoF,SAAUpF,KAAKgO,gBACnE,CAEAzI,UACE,IAAK,MAAMyT,IAAe,CAAC34B,OAAQ2f,KAAKuY,SACtChY,GAAaC,IAAIwY,EAAa1B,IAGhCtX,KAAKwY,UAAUjT,UAEfvF,KAAK0Y,WAAWtB,aAEhBjS,MAAMI,SACR,CAEA0T,eACEjZ,KAAK6Y,eACP,CAGAJ,sBACE,OAAO,IAAI3C,GAAS,CAClBhb,UAAWgG,QAAQd,KAAKqF,QAAQ8Q,UAEhCvQ,WAAY5F,KAAKgO,eAErB,CAEA2K,uBACE,OAAO,IAAI7B,GAAU,CACnBF,YAAa5W,KAAKoF,UAEtB,CAEA0T,aAAahZ,GAENha,SAAS6G,KAAK1H,SAAS+a,KAAKoF,WAC/Btf,SAAS6G,KAAK0pB,OAAOrW,KAAKoF,UAG5BpF,KAAKoF,SAAS5jB,MAAMwwB,QAAU,QAE9BhS,KAAKoF,SAASxjB,gBAAgB,eAE9Boe,KAAKoF,SAASvjB,aAAa,cAAc,GAEzCme,KAAKoF,SAASvjB,aAAa,OAAQ,UAEnCme,KAAKoF,SAASlZ,UAAY,EAC1B,MAAMgtB,EAAYjS,GAAeC,QA3IT,cA2IsClH,KAAKuY,SAE/DW,IACFA,EAAUhtB,UAAY,GAGxB6P,GAAOiE,KAAKoF,UAEZpF,KAAKoF,SAAS5J,UAAUtE,IAAIghB,IAa5BlY,KAAK2F,gBAXsB,KACrB3F,KAAKqF,QAAQqN,OACf1S,KAAK0Y,WAAWzB,WAGlBjX,KAAKmP,kBAAmB,EACxB5O,GAAakB,QAAQzB,KAAKoF,SAAUuS,GAAe,CACjD7X,iBACA,GAGoCE,KAAKuY,QAASvY,KAAKgO,cAC7D,CAEApC,qBACErL,GAAaY,GAAGnB,KAAKoF,SAAU2S,IAAyB3Y,IACtD,GAtLe,WAsLXA,EAAM7hB,IAIV,OAAIyiB,KAAKqF,QAAQ0F,UACf3L,EAAM+C,sBACNnC,KAAK4P,aAIP5P,KAAKmZ,4BAA4B,IAEnC5Y,GAAaY,GAAG9gB,OAAQu3B,IAAgB,KAClC5X,KAAK2P,WAAa3P,KAAKmP,kBACzBnP,KAAK6Y,eACP,IAEFtY,GAAaY,GAAGnB,KAAKoF,SAAU0S,IAAyB1Y,IAEtDmB,GAAaa,IAAIpB,KAAKoF,SAAUyS,IAAqBuB,IAC/CpZ,KAAKoF,WAAahG,EAAMpS,QAAUgT,KAAKoF,WAAagU,EAAOpsB,SAIjC,WAA1BgT,KAAKqF,QAAQ8Q,SAMbnW,KAAKqF,QAAQ8Q,UACfnW,KAAK4P,OANL5P,KAAKmZ,6BAOP,GACA,GAEN,CAEAJ,aACE/Y,KAAKoF,SAAS5jB,MAAMwwB,QAAU,OAE9BhS,KAAKoF,SAASvjB,aAAa,eAAe,GAE1Cme,KAAKoF,SAASxjB,gBAAgB,cAE9Boe,KAAKoF,SAASxjB,gBAAgB,QAE9Boe,KAAKmP,kBAAmB,EAExBnP,KAAKwY,UAAU5I,MAAK,KAClB9pB,SAAS6G,KAAK6O,UAAUuH,OAAOkV,IAE/BjY,KAAKqZ,oBAELrZ,KAAK4Y,WAAW9lB,QAEhByN,GAAakB,QAAQzB,KAAKoF,SAAUqS,GAAe,GAEvD,CAEAzJ,cACE,OAAOhO,KAAKoF,SAAS5J,UAAUvW,SAtOT,OAuOxB,CAEAk0B,6BAGE,GAFkB5Y,GAAakB,QAAQzB,KAAKoF,SAAUoS,IAExC3V,iBACZ,OAGF,MAAMyX,EAAqBtZ,KAAKoF,SAAStX,aAAehI,SAASC,gBAAgBsC,aAC3EkxB,EAAmBvZ,KAAKoF,SAAS5jB,MAAMiL,UAEpB,WAArB8sB,GAAiCvZ,KAAKoF,SAAS5J,UAAUvW,SAASkzB,MAIjEmB,IACHtZ,KAAKoF,SAAS5jB,MAAMiL,UAAY,UAGlCuT,KAAKoF,SAAS5J,UAAUtE,IAAIihB,IAE5BnY,KAAK2F,gBAAe,KAClB3F,KAAKoF,SAAS5J,UAAUuH,OAAOoV,IAE/BnY,KAAK2F,gBAAe,KAClB3F,KAAKoF,SAAS5jB,MAAMiL,UAAY8sB,CAAgB,GAC/CvZ,KAAKuY,QAAQ,GACfvY,KAAKuY,SAERvY,KAAKoF,SAASsN,QAChB,CAMAmG,gBACE,MAAMS,EAAqBtZ,KAAKoF,SAAStX,aAAehI,SAASC,gBAAgBsC,aAE3E0sB,EAAiB/U,KAAK4Y,WAAWvE,WAEjCmF,EAAoBzE,EAAiB,EAE3C,GAAIyE,IAAsBF,EAAoB,CAC5C,MAAM/2B,EAAW4Z,KAAU,cAAgB,eAC3C6D,KAAKoF,SAAS5jB,MAAMe,GAAY,GAAGwyB,KACrC,CAEA,IAAKyE,GAAqBF,EAAoB,CAC5C,MAAM/2B,EAAW4Z,KAAU,eAAiB,cAC5C6D,KAAKoF,SAAS5jB,MAAMe,GAAY,GAAGwyB,KACrC,CACF,CAEAsE,oBACErZ,KAAKoF,SAAS5jB,MAAMi4B,YAAc,GAClCzZ,KAAKoF,SAAS5jB,MAAMk4B,aAAe,EACrC,CAGA7T,uBAAuBxB,EAAQvE,GAC7B,OAAOE,KAAK4G,MAAK,WACf,MAAM9b,EAAOwtB,GAAMjS,oBAAoBrG,KAAMqE,GAE7C,GAAsB,iBAAXA,EAAX,CAIA,QAA4B,IAAjBvZ,EAAKuZ,GACd,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,GAAQvE,EANb,CAOF,GACF,EAQFS,GAAaY,GAAGrb,SAAUkyB,GApTK,4BAoT2C,SAAU5Y,GAClF,MAAMpS,EAASsN,GAAuB0F,MAElC,CAAC,IAAK,QAAQ9F,SAAS8F,KAAKoG,UAC9BhH
,EAAM+C,iBAGR5B,GAAaa,IAAIpU,EAAQ0qB,IAAciC,IACjCA,EAAU9X,kBAKdtB,GAAaa,IAAIpU,EAAQyqB,IAAgB,KACnC3c,GAAUkF,OACZA,KAAK0S,OACP,GACA,IAGJ,MAAMkH,EAAc3S,GAAeC,QA3Ub,eA6UlB0S,GACFtB,GAAMxS,YAAY8T,GAAahK,OAGpB0I,GAAMjS,oBAAoBrZ,GAClC+Z,OAAO/G,KACd,IACAgG,GAAqBsS,IAKrBjc,GAAmBic,IAYnB,MAEMuB,GAAc,gBACdC,GAAiB,YACjBC,GAAwB,OAAOF,KAAcC,KAE7CE,GAAoB,OACpBC,GAAuB,UACvBC,GAAoB,SAEpBC,GAAgB,kBAChBC,GAAe,OAAOP,KACtBQ,GAAgB,QAAQR,KACxBS,GAAe,OAAOT,KACtBU,GAAuB,gBAAgBV,KACvCW,GAAiB,SAASX,KAC1BY,GAAe,SAASZ,KACxBa,GAAyB,QAAQb,KAAcC,KAC/Ca,GAAwB,kBAAkBd,KAE1Ce,GAAY,CAChBzE,UAAU,EACVpL,UAAU,EACV7f,QAAQ,GAEJ2vB,GAAgB,CACpB1E,SAAU,mBACVpL,SAAU,UACV7f,OAAQ,WAMV,MAAM4vB,WAAkB5V,GACtBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GACfrE,KAAK2P,UAAW,EAChB3P,KAAKwY,UAAYxY,KAAKyY,sBACtBzY,KAAK0Y,WAAa1Y,KAAK2Y,uBAEvB3Y,KAAK4L,oBACP,CAGW3H,qBACT,OAAO2W,EACT,CAEW1W,yBACT,OAAO2W,EACT,CAEWpe,kBACT,MAtDW,WAuDb,CAGAsK,OAAOjH,GACL,OAAOE,KAAK2P,SAAW3P,KAAK4P,OAAS5P,KAAK6P,KAAK/P,EACjD,CAEA+P,KAAK/P,GACCE,KAAK2P,UAISpP,GAAakB,QAAQzB,KAAKoF,SAAUgV,GAAc,CAClEta,kBAGY+B,mBAId7B,KAAK2P,UAAW,EAEhB3P,KAAKwY,UAAU3I,OAEV7P,KAAKqF,QAAQna,SAChB,IAAIkpB,IAAkBxE,OAGxB5P,KAAKoF,SAASvjB,aAAa,cAAc,GAEzCme,KAAKoF,SAASvjB,aAAa,OAAQ,UAEnCme,KAAKoF,SAAS5J,UAAUtE,IAAI+iB,IAgB5Bja,KAAK2F,gBAdoB,KAClB3F,KAAKqF,QAAQna,SAAU8U,KAAKqF,QAAQ8Q,UACvCnW,KAAK0Y,WAAWzB,WAGlBjX,KAAKoF,SAAS5J,UAAUtE,IAAI8iB,IAE5Bha,KAAKoF,SAAS5J,UAAUuH,OAAOkX,IAE/B1Z,GAAakB,QAAQzB,KAAKoF,SAAUiV,GAAe,CACjDva,iBACA,GAGkCE,KAAKoF,UAAU,GACvD,CAEAwK,OACO5P,KAAK2P,WAIQpP,GAAakB,QAAQzB,KAAKoF,SAAUkV,IAExCzY,mBAId7B,KAAK0Y,WAAWtB,aAEhBpX,KAAKoF,SAAS2V,OAEd/a,KAAK2P,UAAW,EAEhB3P,KAAKoF,SAAS5J,UAAUtE,IAAIgjB,IAE5Bla,KAAKwY,UAAU5I,OAgBf5P,KAAK2F,gBAdoB,KACvB3F,KAAKoF,SAAS5J,UAAUuH,OAAOiX,GAAmBE,IAElDla,KAAKoF,SAASxjB,gBAAgB,cAE9Boe,KAAKoF,SAASxjB,gBAAgB,QAEzBoe,KAAKqF,QAAQna,SAChB,IAAIkpB,IAAkBthB,QAGxByN,GAAakB,QAAQzB,KAAKoF,SAAUoV,GAAe,GAGfxa,KAAKoF,UAAU,IACvD,CAEAG,UACEvF,KAAKwY,UAAUjT,UAEfvF,KAAK0Y,WAAWtB,aAEhBjS,MAAMI,SACR,CAGAkT,sBACE,MAUM3d,EAAYgG,QAAQd,KAAKqF,QAAQ8Q,UACvC,OAAO,IAAIL,GAAS,CAClBJ,UA7JsB,qBA8JtB5a,YACA8K,YAAY,EACZgQ,YAAa5V,KAAKoF,SAAS5f,WAC3BmwB,cAAe7a,EAhBK,KACU,WAA1BkF,KAAKqF,QAAQ8Q,SAKjBnW,KAAK4P,OAJHrP,GAAakB,QAAQzB,KAAKoF,SAAUmV,GAI3B,EAUgC,MAE/C,CAEA5B,uBACE,OAAO,IAAI7B,GAAU,CACnBF,YAAa5W,KAAKoF,UAEtB,CAEAwG,qBACErL,GAAaY,GAAGnB,KAAKoF,SAAUuV,IAAuBvb,IAhLvC,WAiLTA,EAAM7hB,MAILyiB,KAAKqF,QAAQ0F,SAKlB/K,KAAK4P,OAJHrP,GAAakB,QAAQzB,KAAKoF,SAAUmV,IAI3B,GAEf,CAGA1U,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAOgwB,GAAUzU,oBAAoBrG,KAAMqE,GAEjD,GAAsB,iBAAXA,EAAX,CAIA,QAAqB7K,IAAjB1O,EAAKuZ,IAAyBA,EAAOlK,WAAW,MAAmB,gBAAXkK,EAC1D,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,GAAQrE,KANb,CAOF,GACF,EAQFO,GAAaY,GAAGrb,SAAU40B,GAvMK,gCAuM2C,SAAUtb,GAClF,MAAMpS,EAASsN,GAAuB0F,MAMtC,GAJI,CAAC,IAAK,QAAQ9F,SAAS8F,KAAKoG,UAC9BhH,EAAM+C,iBAGJ9G,GAAW2E,MACb,OAGFO,GAAaa,IAAIpU,EAAQwtB,IAAgB,KAEnC1f,GAAUkF,OACZA,KAAK0S,OACP,IAGF,MAAMkH,EAAc3S,GAAeC,QAAQiT,IAEvCP,GAAeA,IAAgB5sB,GACjC8tB,GAAUhV,YAAY8T,GAAahK,OAGxBkL,GAAUzU,oBAAoBrZ,GACtC+Z,OAAO/G,KACd,IACAO,GAAaY,GAAG9gB,OAAQ05B,IAAuB,KAC7C,IAAK,MAAMhgB,KAAYkN,GAAerU,KAAKunB,IACzCW,GAAUzU,oBAAoBtM,GAAU8V,MAC1C,IAEFtP,GAAaY,GAAG9gB,OAAQo6B,IAAc,KACpC,IAAK,MAAMz6B,KAAWinB,GAAerU,KAAK,gDACG,UAAvClN,iBAAiB1F,GAASiC,UAC5B64B,GAAUzU,oBAAoBrmB,GAAS4vB,MAE3C,IAEF5J,GAAqB8U,IAKrBze,GAAmBye,IAQnB,MAAME,GAAgB,IAAIjkB,IAAI,CAAC,aAAc,OAAQ,OAAQ,WAAY,WAAY,SAAU,MAAO,eAQhGkkB,GAAmB,iEAOnBC,GAAmB,qIAEnBC,GAAmB,CAAC34B,EAAW44B,KACnC,MAAMC,EAAgB74B,EAAUvC,SAASC,cAEzC,OAAIk7B,EAAqBlhB,SAASmhB,IAC5BL,GAAc5jB,IAAIikB,IACbva,QAAQma,GAAiBn3B,KAAKtB,EAAU84B,YAAcJ,GAAi
Bp3B,KAAKtB,EAAU84B,YAO1FF,EAAqBx0B,QAAO20B,GAAkBA,aAA0BxW,SAAQ7R,MAAKsoB,GAASA,EAAM13B,KAAKu3B,IAAe,EAG3HI,GAAmB,CAEvB,IAAK,CAAC,QAAS,MAAO,KAAM,OAAQ,OAjCP,kBAkC7BnqB,EAAG,CAAC,SAAU,OAAQ,QAAS,OAC/BoqB,KAAM,GACNnqB,EAAG,GACHoqB,GAAI,GACJC,IAAK,GACLC,KAAM,GACNC,IAAK,GACLC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJxqB,EAAG,GACHgb,IAAK,CAAC,MAAO,SAAU,MAAO,QAAS,QAAS,UAChDyP,GAAI,GACJC,GAAI,GACJC,EAAG,GACHC,IAAK,GACLC,EAAG,GACHC,MAAO,GACPC,KAAM,GACNC,IAAK,GACLC,IAAK,GACLC,OAAQ,GACRC,EAAG,GACHC,GAAI,IA+CAC,GAAY,CAChBC,UAAW3B,GACX4B,QAAS,CAAC,EAEVC,WAAY,GACZhwB,MAAM,EACNiwB,UAAU,EACVC,WAAY,KACZC,SAAU,eAENC,GAAgB,CACpBN,UAAW,SACXC,QAAS,SACTC,WAAY,oBACZhwB,KAAM,UACNiwB,SAAU,UACVC,WAAY,kBACZC,SAAU,UAENE,GAAqB,CACzBC,MAAO,iCACP7jB,SAAU,oBAMZ,MAAM8jB,WAAwB7Z,GAC5BU,YAAYL,GACVc,QACAnF,KAAKqF,QAAUrF,KAAKoE,WAAWC,EACjC,CAGWJ,qBACT,OAAOkZ,EACT,CAEWjZ,yBACT,OAAOwZ,EACT,CAEWjhB,kBACT,MA5CW,iBA6Cb,CAGAqhB,aACE,OAAOrgC,OAAO0hB,OAAOa,KAAKqF,QAAQgY,SAAS95B,KAAI8gB,GAAUrE,KAAK+d,yBAAyB1Z,KAASzd,OAAOka,QACzG,CAEAkd,aACE,OAAOhe,KAAK8d,aAAa3sB,OAAS,CACpC,CAEA8sB,cAAcZ,GAMZ,OALArd,KAAKke,cAAcb,GAEnBrd,KAAKqF,QAAQgY,QAAU,IAAKrd,KAAKqF,QAAQgY,WACpCA,GAEErd,IACT,CAEAme,SACE,MAAMC,EAAkBt4B,SAASswB,cAAc,OAC/CgI,EAAgBC,UAAYre,KAAKse,eAAete,KAAKqF,QAAQoY,UAE7D,IAAK,MAAO1jB,EAAUwkB,KAAS9gC,OAAO4kB,QAAQrC,KAAKqF,QAAQgY,SACzDrd,KAAKwe,YAAYJ,EAAiBG,EAAMxkB,GAG1C,MAAM0jB,EAAWW,EAAgBjX,SAAS,GAEpCmW,EAAatd,KAAK+d,yBAAyB/d,KAAKqF,QAAQiY,YAM9D,OAJIA,GACFG,EAASjiB,UAAUtE,OAAOomB,EAAW36B,MAAM,MAGtC86B,CACT,CAGAjZ,iBAAiBH,GACfc,MAAMX,iBAAiBH,GAEvBrE,KAAKke,cAAc7Z,EAAOgZ,QAC5B,CAEAa,cAAcO,GACZ,IAAK,MAAO1kB,EAAUsjB,KAAY5/B,OAAO4kB,QAAQoc,GAC/CtZ,MAAMX,iBAAiB,CACrBzK,WACA6jB,MAAOP,GACNM,GAEP,CAEAa,YAAYf,EAAUJ,EAAStjB,GAC7B,MAAM2kB,EAAkBzX,GAAeC,QAAQnN,EAAU0jB,GAEpDiB,KAILrB,EAAUrd,KAAK+d,yBAAyBV,IAOpC,GAAUA,GACZrd,KAAK2e,sBAAsB9jB,GAAWwiB,GAAUqB,GAK9C1e,KAAKqF,QAAQ/X,KACfoxB,EAAgBL,UAAYre,KAAKse,eAAejB,GAIlDqB,EAAgBE,YAAcvB,EAf5BqB,EAAgB3b,SAgBpB,CAEAub,eAAeG,GACb,OAAOze,KAAKqF,QAAQkY,SA7KxB,SAAsBsB,EAAYzB,EAAW0B,GAC3C,IAAKD,EAAW1tB,OACd,OAAO0tB,EAGT,GAAIC,GAAgD,mBAArBA,EAC7B,OAAOA,EAAiBD,GAG1B,MACME,GADY,IAAI1+B,OAAO2+B,WACKC,gBAAgBJ,EAAY,aACxDv9B,EAAW,GAAGlC,UAAU2/B,EAAgBpyB,KAAKyT,iBAAiB,MAEpE,IAAK,MAAMpgB,KAAWsB,EAAU,CAC9B,MAAM49B,EAAcl/B,EAAQC,SAASC,cAErC,IAAKzC,OAAO4D,KAAK+7B,GAAWljB,SAASglB,GAAc,CACjDl/B,EAAQ+iB,SACR,QACF,CAEA,MAAMoc,EAAgB,GAAG//B,UAAUY,EAAQ0B,YACrC09B,EAAoB,GAAGhgC,OAAOg+B,EAAU,MAAQ,GAAIA,EAAU8B,IAAgB,IAEpF,IAAK,MAAM18B,KAAa28B,EACjBhE,GAAiB34B,EAAW48B,IAC/Bp/B,EAAQ4B,gBAAgBY,EAAUvC,SAGxC,CAEA,OAAO8+B,EAAgBpyB,KAAK0xB,SAC9B,CA6ImCgB,CAAaZ,EAAKze,KAAKqF,QAAQ+X,UAAWpd,KAAKqF,QAAQmY,YAAciB,CACtG,CAEAV,yBAAyBU,GACvB,MAAsB,mBAARA,EAAqBA,EAAIze,MAAQye,CACjD,CAEAE,sBAAsB3+B,EAAS0+B,GAC7B,GAAI1e,KAAKqF,QAAQ/X,KAGf,OAFAoxB,EAAgBL,UAAY,QAC5BK,EAAgBrI,OAAOr2B,GAIzB0+B,EAAgBE,YAAc5+B,EAAQ4+B,WACxC,EAcF,MACMU,GAAwB,IAAIvoB,IAAI,CAAC,WAAY,YAAa,eAC1DwoB,GAAoB,OAEpBC,GAAoB,OAEpBC,GAAiB,SACjBC,GAAmB,gBACnBC,GAAgB,QAChBC,GAAgB,QAahBC,GAAgB,CACpBC,KAAM,OACNC,IAAK,MACLC,MAAO7jB,KAAU,OAAS,QAC1B8jB,OAAQ,SACRC,KAAM/jB,KAAU,QAAU,QAEtBgkB,GAAY,CAChB/C,UAAW3B,GACX2E,WAAW,EACX1xB,SAAU,kBACV2xB,WAAW,EACXC,YAAa,GACbC,MAAO,EACP9vB,mBAAoB,CAAC,MAAO,QAAS,SAAU,QAC/CnD,MAAM,EACN7E,OAAQ,CAAC,EAAG,GACZtJ,UAAW,MACX8yB,aAAc,KACdsL,UAAU,EACVC,WAAY,KACZzjB,UAAU,EACV0jB,SAAU,+GACV+C,MAAO,GACP/e,QAAS,eAELgf,GAAgB,CACpBrD,UAAW,SACXgD,UAAW,UACX1xB,SAAU,mBACV2xB,UAAW,2BACXC,YAAa,oBACbC,MAAO,kBACP9vB,mBAAoB,QACpBnD,KAAM,UACN7E,OAAQ,0BACRtJ,UAAW,oBACX8yB,aAAc,yBACdsL,SAAU,UACVC,WAAY,kBACZzjB,SAAU,mBACV0jB,SAAU,SACV+C,
MAAO,4BACP/e,QAAS,UAMX,MAAMif,WAAgBxb,GACpBR,YAAY1kB,EAASqkB,GACnB,QAAsB,IAAX,EACT,MAAM,IAAIW,UAAU,+DAGtBG,MAAMnlB,EAASqkB,GAEfrE,KAAK2gB,YAAa,EAClB3gB,KAAK4gB,SAAW,EAChB5gB,KAAK6gB,WAAa,KAClB7gB,KAAK8gB,eAAiB,CAAC,EACvB9gB,KAAKoS,QAAU,KACfpS,KAAK+gB,iBAAmB,KACxB/gB,KAAKghB,YAAc,KAEnBhhB,KAAKihB,IAAM,KAEXjhB,KAAKkhB,gBAEAlhB,KAAKqF,QAAQtL,UAChBiG,KAAKmhB,WAET,CAGWld,qBACT,OAAOkc,EACT,CAEWjc,yBACT,OAAOuc,EACT,CAEWhkB,kBACT,MA1GW,SA2Gb,CAGA2kB,SACEphB,KAAK2gB,YAAa,CACpB,CAEAU,UACErhB,KAAK2gB,YAAa,CACpB,CAEAW,gBACEthB,KAAK2gB,YAAc3gB,KAAK2gB,UAC1B,CAEA5Z,SACO/G,KAAK2gB,aAIV3gB,KAAK8gB,eAAeS,OAASvhB,KAAK8gB,eAAeS,MAE7CvhB,KAAK2P,WACP3P,KAAKwhB,SAKPxhB,KAAKyhB,SACP,CAEAlc,UACE0H,aAAajN,KAAK4gB,UAClBrgB,GAAaC,IAAIR,KAAKoF,SAASjK,QAAQskB,IAAiBC,GAAkB1f,KAAK0hB,mBAE3E1hB,KAAKoF,SAASpL,aAAa,2BAC7BgG,KAAKoF,SAASvjB,aAAa,QAASme,KAAKoF,SAASpL,aAAa,2BAGjEgG,KAAK2hB,iBAELxc,MAAMI,SACR,CAEAsK,OACE,GAAoC,SAAhC7P,KAAKoF,SAAS5jB,MAAMwwB,QACtB,MAAM,IAAI7N,MAAM,uCAGlB,IAAMnE,KAAK4hB,mBAAoB5hB,KAAK2gB,WAClC,OAGF,MAAMhH,EAAYpZ,GAAakB,QAAQzB,KAAKoF,SAAUpF,KAAK0E,YAAYiJ,UAlJtD,SAqJXkU,GAFalmB,GAAeqE,KAAKoF,WAELpF,KAAKoF,SAAS7kB,cAAcwF,iBAAiBd,SAAS+a,KAAKoF,UAE7F,GAAIuU,EAAU9X,mBAAqBggB,EACjC,OAIF7hB,KAAK2hB,iBAEL,MAAMV,EAAMjhB,KAAK8hB,iBAEjB9hB,KAAKoF,SAASvjB,aAAa,mBAAoBo/B,EAAIjnB,aAAa,OAEhE,MAAM,UACJqmB,GACErgB,KAAKqF,QAaT,GAXKrF,KAAKoF,SAAS7kB,cAAcwF,gBAAgBd,SAAS+a,KAAKihB,OAC7DZ,EAAUhK,OAAO4K,GACjB1gB,GAAakB,QAAQzB,KAAKoF,SAAUpF,KAAK0E,YAAYiJ,UAtKpC,cAyKnB3N,KAAKoS,QAAUpS,KAAKyS,cAAcwO,GAClCA,EAAIzlB,UAAUtE,IAAIsoB,IAKd,iBAAkB15B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAKwa,UAC/C5G,GAAaY,GAAGnhB,EAAS,YAAa8b,IAc1CkE,KAAK2F,gBAVY,KACfpF,GAAakB,QAAQzB,KAAKoF,SAAUpF,KAAK0E,YAAYiJ,UAvLrC,WAyLQ,IAApB3N,KAAK6gB,YACP7gB,KAAKwhB,SAGPxhB,KAAK6gB,YAAa,CAAK,GAGK7gB,KAAKihB,IAAKjhB,KAAKgO,cAC/C,CAEA4B,OACE,GAAK5P,KAAK2P,aAIQpP,GAAakB,QAAQzB,KAAKoF,SAAUpF,KAAK0E,YAAYiJ,UA3MtD,SA6MH9L,iBAAd,CASA,GALY7B,KAAK8hB,iBAEbtmB,UAAUuH,OAAOyc,IAGjB,iBAAkB15B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAKwa,UAC/C5G,GAAaC,IAAIxgB,EAAS,YAAa8b,IAI3CkE,KAAK8gB,eAA4B,OAAI,EACrC9gB,KAAK8gB,eAAelB,KAAiB,EACrC5f,KAAK8gB,eAAenB,KAAiB,EACrC3f,KAAK6gB,WAAa,KAgBlB7gB,KAAK2F,gBAdY,KACX3F,KAAK+hB,yBAIJ/hB,KAAK6gB,YACR7gB,KAAK2hB,iBAGP3hB,KAAKoF,SAASxjB,gBAAgB,oBAE9B2e,GAAakB,QAAQzB,KAAKoF,SAAUpF,KAAK0E,YAAYiJ,UA3OpC,WA2O8D,GAGnD3N,KAAKihB,IAAKjhB,KAAKgO,cAhC7C,CAiCF,CAEAxiB,SACMwU,KAAKoS,SACPpS,KAAKoS,QAAQ5mB,QAEjB,CAGAo2B,iBACE,OAAO9gB,QAAQd,KAAKgiB,YACtB,CAEAF,iBAKE,OAJK9hB,KAAKihB,MACRjhB,KAAKihB,IAAMjhB,KAAKiiB,kBAAkBjiB,KAAKghB,aAAehhB,KAAKkiB,2BAGtDliB,KAAKihB,GACd,CAEAgB,kBAAkB5E,GAChB,MAAM4D,EAAMjhB,KAAKmiB,oBAAoB9E,GAASc,SAG9C,IAAK8C,EACH,OAAO,KAGTA,EAAIzlB,UAAUuH,OAAOwc,GAAmBC,IAExCyB,EAAIzlB,UAAUtE,IAAI,MAAM8I,KAAK0E,YAAYjI,aACzC,MAAM2lB,EA92HKC,KACb,GACEA,GAAUz/B,KAAK0/B,MAlBH,IAkBS1/B,KAAK2/B,gBACnBz8B,SAAS08B,eAAeH,IAEjC,OAAOA,CAAM,EAy2HGI,CAAOziB,KAAK0E,YAAYjI,MAAMnc,WAO5C,OANA2gC,EAAIp/B,aAAa,KAAMugC,GAEnBpiB,KAAKgO,eACPiT,EAAIzlB,UAAUtE,IAAIqoB,IAGb0B,CACT,CAEAyB,WAAWrF,GACTrd,KAAKghB,YAAc3D,EAEfrd,KAAK2P,aACP3P,KAAK2hB,iBAEL3hB,KAAK6P,OAET,CAEAsS,oBAAoB9E,GAYlB,OAXIrd,KAAK+gB,iBACP/gB,KAAK+gB,iBAAiB9C,cAAcZ,GAEpCrd,KAAK+gB,iBAAmB,IAAIlD,GAAgB,IAAK7d,KAAKqF,QAGpDgY,UACAC,WAAYtd,KAAK+d,yBAAyB/d,KAAKqF,QAAQib,eAIpDtgB,KAAK+gB,gBACd,CAEAmB,yBACE,MAAO,CACL,iBAA0BliB,KAAKgiB,YAEnC,CAEAA,YACE,OAAOhiB,KAAK+d,yBAAyB/d,KAAKqF,QAAQmb,QAAUxgB,KAAKoF,SAASpL,aAAa,yBACzF,CAGA2oB,6BAA6BvjB,GAC3B,OAAOY,KAAK0E,YAAY2B,oBAAoBjH,EAAMW,eAAgBC,KAAK4iB,qBACzE,CAEA5U,cACE,OAAOhO,KAAKqF,QAAQ+a,WAAapgB,KAAKihB,KAAOjhB,KAAKihB,IAAIzlB,UAAU
vW,SAASs6B,GAC3E,CAEA5P,WACE,OAAO3P,KAAKihB,KAAOjhB,KAAKihB,IAAIzlB,UAAUvW,SAASu6B,GACjD,CAEA/M,cAAcwO,GACZ,MAAM9hC,EAA8C,mBAA3B6gB,KAAKqF,QAAQlmB,UAA2B6gB,KAAKqF,QAAQlmB,UAAUlB,KAAK+hB,KAAMihB,EAAKjhB,KAAKoF,UAAYpF,KAAKqF,QAAQlmB,UAChI0jC,EAAahD,GAAc1gC,EAAU8lB,eAC3C,OAAO,GAAoBjF,KAAKoF,SAAU6b,EAAKjhB,KAAK6S,iBAAiBgQ,GACvE,CAEA5P,aACE,MAAM,OACJxqB,GACEuX,KAAKqF,QAET,MAAsB,iBAAX5c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAASmf,OAAO+P,SAASlvB,EAAO,MAGzC,mBAAXqK,EACFyqB,GAAczqB,EAAOyqB,EAAYlT,KAAKoF,UAGxC3c,CACT,CAEAs1B,yBAAyBU,GACvB,MAAsB,mBAARA,EAAqBA,EAAIxgC,KAAK+hB,KAAKoF,UAAYqZ,CAC/D,CAEA5L,iBAAiBgQ,GACf,MAAM1P,EAAwB,CAC5Bh0B,UAAW0jC,EACXhsB,UAAW,CAAC,CACV9V,KAAM,OACNmB,QAAS,CACPuO,mBAAoBuP,KAAKqF,QAAQ5U,qBAElC,CACD1P,KAAM,SACNmB,QAAS,CACPuG,OAAQuX,KAAKiT,eAEd,CACDlyB,KAAM,kBACNmB,QAAS,CACPwM,SAAUsR,KAAKqF,QAAQ3W,WAExB,CACD3N,KAAM,QACNmB,QAAS,CACPlC,QAAS,IAAIggB,KAAK0E,YAAYjI,eAE/B,CACD1b,KAAM,kBACNC,SAAS,EACTC,MAAO,aACPC,GAAI4J,IAGFkV,KAAK8hB,iBAAiBjgC,aAAa,wBAAyBiJ,EAAK1J,MAAMjC,UAAU,KAIvF,MAAO,IAAKg0B,KAC+B,mBAA9BnT,KAAKqF,QAAQ4M,aAA8BjS,KAAKqF,QAAQ4M,aAAakB,GAAyBnT,KAAKqF,QAAQ4M,aAE1H,CAEAiP,gBACE,MAAM4B,EAAW9iB,KAAKqF,QAAQ5D,QAAQ9e,MAAM,KAE5C,IAAK,MAAM8e,KAAWqhB,EACpB,GAAgB,UAAZrhB,EACFlB,GAAaY,GAAGnB,KAAKoF,SAAUpF,KAAK0E,YAAYiJ,UA3YlC,SA2Y4D3N,KAAKqF,QAAQtL,UAAUqF,IAC/EY,KAAK2iB,6BAA6BvjB,GAE1C2H,QAAQ,SAEb,GAtZU,WAsZNtF,EAA4B,CACrC,MAAMshB,EAAUthB,IAAYke,GAAgB3f,KAAK0E,YAAYiJ,UA9Y5C,cA8Y0E3N,KAAK0E,YAAYiJ,UAhZ5F,WAiZVqV,EAAWvhB,IAAYke,GAAgB3f,KAAK0E,YAAYiJ,UA9Y7C,cA8Y2E3N,KAAK0E,YAAYiJ,UAhZ5F,YAiZjBpN,GAAaY,GAAGnB,KAAKoF,SAAU2d,EAAS/iB,KAAKqF,QAAQtL,UAAUqF,IAC7D,MAAMkU,EAAUtT,KAAK2iB,6BAA6BvjB,GAElDkU,EAAQwN,eAA8B,YAAf1hB,EAAMqB,KAAqBmf,GAAgBD,KAAiB,EAEnFrM,EAAQmO,QAAQ,IAElBlhB,GAAaY,GAAGnB,KAAKoF,SAAU4d,EAAUhjB,KAAKqF,QAAQtL,UAAUqF,IAC9D,MAAMkU,EAAUtT,KAAK2iB,6BAA6BvjB,GAElDkU,EAAQwN,eAA8B,aAAf1hB,EAAMqB,KAAsBmf,GAAgBD,IAAiBrM,EAAQlO,SAASngB,SAASma,EAAMU,eAEpHwT,EAAQkO,QAAQ,GAEpB,CAGFxhB,KAAK0hB,kBAAoB,KACnB1hB,KAAKoF,UACPpF,KAAK4P,MACP,EAGFrP,GAAaY,GAAGnB,KAAKoF,SAASjK,QAAQskB,IAAiBC,GAAkB1f,KAAK0hB,kBAChF,CAEAP,YACE,MAAMX,EAAQxgB,KAAKoF,SAASpL,aAAa,SAEpCwmB,IAIAxgB,KAAKoF,SAASpL,aAAa,eAAkBgG,KAAKoF,SAASwZ,YAAYxkB,QAC1E4F,KAAKoF,SAASvjB,aAAa,aAAc2+B,GAG3CxgB,KAAKoF,SAASvjB,aAAa,yBAA0B2+B,GAGrDxgB,KAAKoF,SAASxjB,gBAAgB,SAChC,CAEA6/B,SACMzhB,KAAK2P,YAAc3P,KAAK6gB,WAC1B7gB,KAAK6gB,YAAa,GAIpB7gB,KAAK6gB,YAAa,EAElB7gB,KAAKijB,aAAY,KACXjjB,KAAK6gB,YACP7gB,KAAK6P,MACP,GACC7P,KAAKqF,QAAQkb,MAAM1Q,MACxB,CAEA2R,SACMxhB,KAAK+hB,yBAIT/hB,KAAK6gB,YAAa,EAElB7gB,KAAKijB,aAAY,KACVjjB,KAAK6gB,YACR7gB,KAAK4P,MACP,GACC5P,KAAKqF,QAAQkb,MAAM3Q,MACxB,CAEAqT,YAAYrlB,EAASslB,GACnBjW,aAAajN,KAAK4gB,UAClB5gB,KAAK4gB,SAAW/iB,WAAWD,EAASslB,EACtC,CAEAnB,uBACE,OAAOtkC,OAAO0hB,OAAOa,KAAK8gB,gBAAgB5mB,UAAS,EACrD,CAEAkK,WAAWC,GACT,MAAM8e,EAAiB5f,GAAYG,kBAAkB1D,KAAKoF,UAE1D,IAAK,MAAMge,KAAiB3lC,OAAO4D,KAAK8hC,GAClC7D,GAAsBloB,IAAIgsB,WACrBD,EAAeC,GAY1B,OARA/e,EAAS,IAAK8e,KACU,iBAAX9e,GAAuBA,EAASA,EAAS,CAAC,GAEvDA,EAASrE,KAAKsE,gBAAgBD,GAC9BA,EAASrE,KAAKuE,kBAAkBF,GAEhCrE,KAAKwE,iBAAiBH,GAEfA,CACT,CAEAE,kBAAkBF,GAkBhB,OAjBAA,EAAOgc,WAAiC,IAArBhc,EAAOgc,UAAsBv6B,SAAS6G,KAAOkO,GAAWwJ,EAAOgc,WAEtD,iBAAjBhc,EAAOkc,QAChBlc,EAAOkc,MAAQ,CACb1Q,KAAMxL,EAAOkc,MACb3Q,KAAMvL,EAAOkc,QAIW,iBAAjBlc,EAAOmc,QAChBnc,EAAOmc,MAAQnc,EAAOmc,MAAMlgC,YAGA,iBAAnB+jB,EAAOgZ,UAChBhZ,EAAOgZ,QAAUhZ,EAAOgZ,QAAQ/8B,YAG3B+jB,CACT,CAEAue,qBACE,MAAMve,EAAS,CAAC,EAEhB,IAAK,MAAM9mB,KAAOyiB,KAAKqF,QACjBrF,KAAK0E,YAAYT,QAAQ1mB,KAASyiB,KAAKqF,QAAQ9nB,KACjD8mB,EAAO9mB,GAAOyiB,KAAKqF,QAAQ9nB,IAS/B,OALA8mB,EAAOtK,UAAW,EAClBsK,EAAO5C,QAAU,SAIV4C,CACT,CAEAsd,iBACM3
hB,KAAKoS,UACPpS,KAAKoS,QAAQ3Y,UAEbuG,KAAKoS,QAAU,MAGbpS,KAAKihB,MACPjhB,KAAKihB,IAAIle,SACT/C,KAAKihB,IAAM,KAEf,CAGApb,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAO41B,GAAQra,oBAAoBrG,KAAMqE,GAE/C,GAAsB,iBAAXA,EAAX,CAIA,QAA4B,IAAjBvZ,EAAKuZ,GACd,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IANL,CAOF,GACF,EAQFhI,GAAmBqkB,IAYnB,MAGM2C,GAAY,IAAK3C,GAAQzc,QAC7BoZ,QAAS,GACT50B,OAAQ,CAAC,EAAG,GACZtJ,UAAW,QACXs+B,SAAU,8IACVhc,QAAS,SAEL6hB,GAAgB,IAAK5C,GAAQxc,YACjCmZ,QAAS,kCAMX,MAAMkG,WAAgB7C,GAETzc,qBACT,OAAOof,EACT,CAEWnf,yBACT,OAAOof,EACT,CAEW7mB,kBACT,MA5BW,SA6Bb,CAGAmlB,iBACE,OAAO5hB,KAAKgiB,aAAehiB,KAAKwjB,aAClC,CAGAtB,yBACE,MAAO,CACL,kBAAkBliB,KAAKgiB,YACvB,gBAAoBhiB,KAAKwjB,cAE7B,CAEAA,cACE,OAAOxjB,KAAK+d,yBAAyB/d,KAAKqF,QAAQgY,QACpD,CAGAxX,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAOy4B,GAAQld,oBAAoBrG,KAAMqE,GAE/C,GAAsB,iBAAXA,EAAX,CAIA,QAA4B,IAAjBvZ,EAAKuZ,GACd,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IANL,CAOF,GACF,EAQFhI,GAAmBknB,IAYnB,MAEME,GAAc,gBAEdC,GAAiB,WAAWD,KAC5BE,GAAc,QAAQF,KACtBG,GAAwB,OAAOH,cAE/BI,GAAsB,SAEtBC,GAAwB,SAExBC,GAAqB,YAGrBC,GAAsB,GAAGD,mBAA+CA,uBAGxEE,GAAY,CAChBx7B,OAAQ,KAERy7B,WAAY,eACZC,cAAc,EACdn3B,OAAQ,KACRo3B,UAAW,CAAC,GAAK,GAAK,IAElBC,GAAgB,CACpB57B,OAAQ,gBAERy7B,WAAY,SACZC,aAAc,UACdn3B,OAAQ,UACRo3B,UAAW,SAMb,MAAME,WAAkBpf,GACtBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GAEfrE,KAAKukB,aAAe,IAAI5yB,IACxBqO,KAAKwkB,oBAAsB,IAAI7yB,IAC/BqO,KAAKykB,aAA6D,YAA9C/+B,iBAAiBsa,KAAKoF,UAAU3Y,UAA0B,KAAOuT,KAAKoF,SAC1FpF,KAAK0kB,cAAgB,KACrB1kB,KAAK2kB,UAAY,KACjB3kB,KAAK4kB,oBAAsB,CACzBC,gBAAiB,EACjBC,gBAAiB,GAEnB9kB,KAAK+kB,SACP,CAGW9gB,qBACT,OAAOggB,EACT,CAEW/f,yBACT,OAAOmgB,EACT,CAEW5nB,kBACT,MAhEW,WAiEb,CAGAsoB,UACE/kB,KAAKglB,mCAELhlB,KAAKilB,2BAEDjlB,KAAK2kB,UACP3kB,KAAK2kB,UAAUO,aAEfllB,KAAK2kB,UAAY3kB,KAAKmlB,kBAGxB,IAAK,MAAMC,KAAWplB,KAAKwkB,oBAAoBrlB,SAC7Ca,KAAK2kB,UAAUU,QAAQD,EAE3B,CAEA7f,UACEvF,KAAK2kB,UAAUO,aAEf/f,MAAMI,SACR,CAGAhB,kBAAkBF,GAUhB,OARAA,EAAOrX,OAAS6N,GAAWwJ,EAAOrX,SAAWlH,SAAS6G,KAEtD0X,EAAO6f,WAAa7f,EAAO5b,OAAS,GAAG4b,EAAO5b,oBAAsB4b,EAAO6f,WAE3C,iBAArB7f,EAAO+f,YAChB/f,EAAO+f,UAAY/f,EAAO+f,UAAUzhC,MAAM,KAAKY,KAAInF,GAASmf,OAAOC,WAAWpf,MAGzEimB,CACT,CAEA4gB,2BACOjlB,KAAKqF,QAAQ8e,eAKlB5jB,GAAaC,IAAIR,KAAKqF,QAAQrY,OAAQ22B,IACtCpjB,GAAaY,GAAGnB,KAAKqF,QAAQrY,OAAQ22B,GAAaG,IAAuB1kB,IACvE,MAAMkmB,EAAoBtlB,KAAKwkB,oBAAoB5mC,IAAIwhB,EAAMpS,OAAOtB,MAEpE,GAAI45B,EAAmB,CACrBlmB,EAAM+C,iBACN,MAAMtG,EAAOmE,KAAKykB,cAAgBpkC,OAC5BmE,EAAS8gC,EAAkBxgC,UAAYkb,KAAKoF,SAAStgB,UAE3D,GAAI+W,EAAK0pB,SAKP,YAJA1pB,EAAK0pB,SAAS,CACZnjC,IAAKoC,EACLghC,SAAU,WAMd3pB,EAAK3P,UAAY1H,CACnB,KAEJ,CAEA2gC,kBACE,MAAMjjC,EAAU,CACd2Z,KAAMmE,KAAKykB,aACXL,UAAWpkB,KAAKqF,QAAQ+e,UACxBF,WAAYlkB,KAAKqF,QAAQ6e,YAE3B,OAAO,IAAIuB,sBAAqBpjB,GAAWrC,KAAK0lB,kBAAkBrjB,IAAUngB,EAC9E,CAGAwjC,kBAAkBrjB,GAChB,MAAMsjB,EAAgB/H,GAAS5d,KAAKukB,aAAa3mC,IAAI,IAAIggC,EAAM5wB,OAAO44B,MAEhE3O,EAAW2G,IACf5d,KAAK4kB,oBAAoBC,gBAAkBjH,EAAM5wB,OAAOlI,UAExDkb,KAAK6lB,SAASF,EAAc/H,GAAO,EAG/BkH,GAAmB9kB,KAAKykB,cAAgB3+B,SAASC,iBAAiBmG,UAClE45B,EAAkBhB,GAAmB9kB,KAAK4kB,oBAAoBE,gBACpE9kB,KAAK4kB,oBAAoBE,gBAAkBA,EAE3C,IAAK,MAAMlH,KAASvb,EAAS,CAC3B,IAAKub,EAAMmI,eAAgB,CACzB/lB,KAAK0kB,cAAgB,KAErB1kB,KAAKgmB,kBAAkBL,EAAc/H,IAErC,QACF,CAEA,MAAMqI,EAA2BrI,EAAM5wB,OAAOlI,WAAakb,KAAK4kB,oBAAoBC,gBAEpF,GAAIiB,GAAmBG,GAGrB,GAFAhP,EAAS2G,IAEJkH,EACH,YAOCgB,GAAoBG,GACvBhP,EAAS2G,EAEb,CACF,CAEAoH,mCACEhlB,KAAKukB,aAAe,IAAI5yB,IACxBqO,KAAKwkB,oBAAsB,IAAI7yB,IAC/B,MAAMu0B,EAAcjf,GAAerU,KAAKkxB,GAAuB9jB,KAAKqF,QAAQrY,QAE5E,IAAK,MAAMm5B,KAAUD,EAAa,CAEhC,IAAKC,EAAOz6B,MAAQ2P,GAAW8qB,GAC7B,SAGF,MAAMb,EAAoBre,GAAeC,QAAQif,EAAOz6B
,KAAMsU,KAAKoF,UAE/DtK,GAAUwqB,KACZtlB,KAAKukB,aAAa/xB,IAAI2zB,EAAOz6B,KAAMy6B,GAEnCnmB,KAAKwkB,oBAAoBhyB,IAAI2zB,EAAOz6B,KAAM45B,GAE9C,CACF,CAEAO,SAAS74B,GACHgT,KAAK0kB,gBAAkB13B,IAI3BgT,KAAKgmB,kBAAkBhmB,KAAKqF,QAAQrY,QAEpCgT,KAAK0kB,cAAgB13B,EACrBA,EAAOwO,UAAUtE,IAAI2sB,IAErB7jB,KAAKomB,iBAAiBp5B,GAEtBuT,GAAakB,QAAQzB,KAAKoF,SAAUse,GAAgB,CAClD5jB,cAAe9S,IAEnB,CAEAo5B,iBAAiBp5B,GAEf,GAAIA,EAAOwO,UAAUvW,SAzNQ,iBA0N3BgiB,GAAeC,QAhNc,mBAgNsBla,EAAOmO,QAjNtC,cAiNkEK,UAAUtE,IAAI2sB,SAItG,IAAK,MAAMwC,KAAapf,GAAeI,QAAQra,EA1NnB,qBA6N1B,IAAK,MAAMxJ,KAAQyjB,GAAeM,KAAK8e,EAAWrC,IAChDxgC,EAAKgY,UAAUtE,IAAI2sB,GAGzB,CAEAmC,kBAAkB9gC,GAChBA,EAAOsW,UAAUuH,OAAO8gB,IACxB,MAAMyC,EAAcrf,GAAerU,KAAK,GAAGkxB,MAAyBD,KAAuB3+B,GAE3F,IAAK,MAAM9E,KAAQkmC,EACjBlmC,EAAKob,UAAUuH,OAAO8gB,GAE1B,CAGAhe,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAOw5B,GAAUje,oBAAoBrG,KAAMqE,GAEjD,GAAsB,iBAAXA,EAAX,CAIA,QAAqB7K,IAAjB1O,EAAKuZ,IAAyBA,EAAOlK,WAAW,MAAmB,gBAAXkK,EAC1D,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IANL,CAOF,GACF,EAQF9D,GAAaY,GAAG9gB,OAAQujC,IAAuB,KAC7C,IAAK,MAAM2C,KAAOtf,GAAerU,KAtQT,0BAuQtB0xB,GAAUje,oBAAoBkgB,EAChC,IAMFlqB,GAAmBioB,IAYnB,MAEMkC,GAAc,UACdC,GAAe,OAAOD,KACtBE,GAAiB,SAASF,KAC1BG,GAAe,OAAOH,KACtBI,GAAgB,QAAQJ,KACxBK,GAAuB,QAAQL,KAC/BM,GAAgB,UAAUN,KAC1BO,GAAsB,OAAOP,KAC7BQ,GAAiB,YACjBC,GAAkB,aAClBC,GAAe,UACfC,GAAiB,YACjBC,GAAoB,SACpBC,GAAoB,OACpBC,GAAoB,OAIpBC,GAA+B,yBAI/BC,GAAuB,2EAEvBC,GAAsB,YAHOF,uBAAiDA,mBAA6CA,OAG/EC,KAC5CE,GAA8B,IAAIN,8BAA6CA,+BAA8CA,4BAKnI,MAAMO,WAAYziB,GAChBR,YAAY1kB,GACVmlB,MAAMnlB,GACNggB,KAAKqS,QAAUrS,KAAKoF,SAASjK,QAdN,uCAgBlB6E,KAAKqS,UAMVrS,KAAK4nB,sBAAsB5nB,KAAKqS,QAASrS,KAAK6nB,gBAE9CtnB,GAAaY,GAAGnB,KAAKoF,SAAU0hB,IAAe1nB,GAASY,KAAK4M,SAASxN,KACvE,CAGW3C,kBACT,MAlDW,KAmDb,CAGAoT,OAEE,MAAMiY,EAAY9nB,KAAKoF,SAEvB,GAAIpF,KAAK+nB,cAAcD,GACrB,OAIF,MAAME,EAAShoB,KAAKioB,iBAEdC,EAAYF,EAASznB,GAAakB,QAAQumB,EAAQvB,GAAc,CACpE3mB,cAAegoB,IACZ,KACavnB,GAAakB,QAAQqmB,EAAWnB,GAAc,CAC9D7mB,cAAekoB,IAGHnmB,kBAAoBqmB,GAAaA,EAAUrmB,mBAIzD7B,KAAKmoB,YAAYH,EAAQF,GAEzB9nB,KAAKooB,UAAUN,EAAWE,GAC5B,CAGAI,UAAUpoC,EAASqoC,GACZroC,IAILA,EAAQwb,UAAUtE,IAAIkwB,IAEtBpnB,KAAKooB,UAAU9tB,GAAuBta,IAmBtCggB,KAAK2F,gBAhBY,KACsB,QAAjC3lB,EAAQga,aAAa,SAKzBha,EAAQ4B,gBAAgB,YACxB5B,EAAQ6B,aAAa,iBAAiB,GAEtCme,KAAKsoB,gBAAgBtoC,GAAS,GAE9BugB,GAAakB,QAAQzhB,EAAS4mC,GAAe,CAC3C9mB,cAAeuoB,KAVfroC,EAAQwb,UAAUtE,IAAIowB,GAWtB,GAG0BtnC,EAASA,EAAQwb,UAAUvW,SAASoiC,KACpE,CAEAc,YAAYnoC,EAASqoC,GACdroC,IAILA,EAAQwb,UAAUuH,OAAOqkB,IACzBpnC,EAAQ+6B,OAER/a,KAAKmoB,YAAY7tB,GAAuBta,IAmBxCggB,KAAK2F,gBAhBY,KACsB,QAAjC3lB,EAAQga,aAAa,SAKzBha,EAAQ6B,aAAa,iBAAiB,GACtC7B,EAAQ6B,aAAa,WAAY,MAEjCme,KAAKsoB,gBAAgBtoC,GAAS,GAE9BugB,GAAakB,QAAQzhB,EAAS0mC,GAAgB,CAC5C5mB,cAAeuoB,KAVfroC,EAAQwb,UAAUuH,OAAOukB,GAWzB,GAG0BtnC,EAASA,EAAQwb,UAAUvW,SAASoiC,KACpE,CAEAza,SAASxN,GACP,IAAK,CAAC4nB,GAAgBC,GAAiBC,GAAcC,IAAgBjtB,SAASkF,EAAM7hB,KAClF,OAGF6hB,EAAMyU,kBAENzU,EAAM+C,iBACN,MAAMoL,EAAS,CAAC0Z,GAAiBE,IAAgBjtB,SAASkF,EAAM7hB,KAC1DgrC,EAAoBzqB,GAAqBkC,KAAK6nB,eAAejhC,QAAO5G,IAAYqb,GAAWrb,KAAWof,EAAMpS,OAAQugB,GAAQ,GAE9Hgb,IACFA,EAAkB7V,MAAM,CACtB8V,eAAe,IAEjBb,GAAIthB,oBAAoBkiB,GAAmB1Y,OAE/C,CAEAgY,eAEE,OAAO5gB,GAAerU,KAAK60B,GAAqBznB,KAAKqS,QACvD,CAEA4V,iBACE,OAAOjoB,KAAK6nB,eAAej1B,MAAKzN,GAAS6a,KAAK+nB,cAAc5iC,MAAW,IACzE,CAEAyiC,sBAAsB1iC,EAAQiiB,GAC5BnH,KAAKyoB,yBAAyBvjC,EAAQ,OAAQ,WAE9C,IAAK,MAAMC,KAASgiB,EAClBnH,KAAK0oB,6BAA6BvjC,EAEtC,CAEAujC,6BAA6BvjC,GAC3BA,EAAQ6a,KAAK2oB,iBAAiBxjC,GAE9B,MAAMyjC,EAAW5oB,KAAK+nB,cAAc5iC,GAE9B0jC,EAAY7oB,KAAK8oB,iBAAiB3jC,GAExCA,EAAMtD,aAAa,gBAAiB+mC,GAEhCC,IAAc1jC,GAChB6a,KAAKyoB,yBAAyBI,EAAW
,OAAQ,gBAG9CD,GACHzjC,EAAMtD,aAAa,WAAY,MAGjCme,KAAKyoB,yBAAyBtjC,EAAO,OAAQ,OAG7C6a,KAAK+oB,mCAAmC5jC,EAC1C,CAEA4jC,mCAAmC5jC,GACjC,MAAM6H,EAASsN,GAAuBnV,GAEjC6H,IAILgT,KAAKyoB,yBAAyBz7B,EAAQ,OAAQ,YAE1C7H,EAAMygC,IACR5lB,KAAKyoB,yBAAyBz7B,EAAQ,kBAAmB,IAAI7H,EAAMygC,MAEvE,CAEA0C,gBAAgBtoC,EAASgpC,GACvB,MAAMH,EAAY7oB,KAAK8oB,iBAAiB9oC,GAExC,IAAK6oC,EAAUrtB,UAAUvW,SAxMN,YAyMjB,OAGF,MAAM8hB,EAAS,CAAChN,EAAU2b,KACxB,MAAM11B,EAAUinB,GAAeC,QAAQnN,EAAU8uB,GAE7C7oC,GACFA,EAAQwb,UAAUuL,OAAO2O,EAAWsT,EACtC,EAGFjiB,EAnN6B,mBAmNIqgB,IACjCrgB,EAnN2B,iBAmNIugB,IAC/BuB,EAAUhnC,aAAa,gBAAiBmnC,EAC1C,CAEAP,yBAAyBzoC,EAASwC,EAAWpE,GACtC4B,EAAQ0b,aAAalZ,IACxBxC,EAAQ6B,aAAaW,EAAWpE,EAEpC,CAEA2pC,cAAczY,GACZ,OAAOA,EAAK9T,UAAUvW,SAASmiC,GACjC,CAGAuB,iBAAiBrZ,GACf,OAAOA,EAAKlI,QAAQqgB,IAAuBnY,EAAOrI,GAAeC,QAAQugB,GAAqBnY,EAChG,CAGAwZ,iBAAiBxZ,GACf,OAAOA,EAAKnU,QArOO,gCAqOoBmU,CACzC,CAGAzJ,uBAAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAO68B,GAAIthB,oBAAoBrG,MAErC,GAAsB,iBAAXqE,EAAX,CAIA,QAAqB7K,IAAjB1O,EAAKuZ,IAAyBA,EAAOlK,WAAW,MAAmB,gBAAXkK,EAC1D,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,IANL,CAOF,GACF,EAQF9D,GAAaY,GAAGrb,SAAU+gC,GAAsBW,IAAsB,SAAUpoB,GAC1E,CAAC,IAAK,QAAQlF,SAAS8F,KAAKoG,UAC9BhH,EAAM+C,iBAGJ9G,GAAW2E,OAIf2nB,GAAIthB,oBAAoBrG,MAAM6P,MAChC,IAKAtP,GAAaY,GAAG9gB,OAAQ0mC,IAAqB,KAC3C,IAAK,MAAM/mC,KAAWinB,GAAerU,KAAK80B,IACxCC,GAAIthB,oBAAoBrmB,EAC1B,IAMFqc,GAAmBsrB,IAYnB,MAEMniB,GAAY,YACZyjB,GAAkB,YAAYzjB,KAC9B0jB,GAAiB,WAAW1jB,KAC5B2jB,GAAgB,UAAU3jB,KAC1B4jB,GAAiB,WAAW5jB,KAC5B6jB,GAAa,OAAO7jB,KACpB8jB,GAAe,SAAS9jB,KACxB+jB,GAAa,OAAO/jB,KACpBgkB,GAAc,QAAQhkB,KAEtBikB,GAAkB,OAElBC,GAAkB,OAClBC,GAAqB,UACrBzlB,GAAc,CAClBkc,UAAW,UACXwJ,SAAU,UACVrJ,MAAO,UAEHtc,GAAU,CACdmc,WAAW,EACXwJ,UAAU,EACVrJ,MAAO,KAMT,MAAMsJ,WAAc3kB,GAClBR,YAAY1kB,EAASqkB,GACnBc,MAAMnlB,EAASqkB,GACfrE,KAAK4gB,SAAW,KAChB5gB,KAAK8pB,sBAAuB,EAC5B9pB,KAAK+pB,yBAA0B,EAE/B/pB,KAAKkhB,eACP,CAGWjd,qBACT,OAAOA,EACT,CAEWC,yBACT,OAAOA,EACT,CAEWzH,kBACT,MAlDS,OAmDX,CAGAoT,OACoBtP,GAAakB,QAAQzB,KAAKoF,SAAUmkB,IAExC1nB,mBAId7B,KAAKgqB,gBAEDhqB,KAAKqF,QAAQ+a,WACfpgB,KAAKoF,SAAS5J,UAAUtE,IArDN,QAgEpB8I,KAAKoF,SAAS5J,UAAUuH,OAAO0mB,IAG/B1tB,GAAOiE,KAAKoF,UAEZpF,KAAKoF,SAAS5J,UAAUtE,IAAIwyB,GAAiBC,IAE7C3pB,KAAK2F,gBAfY,KACf3F,KAAKoF,SAAS5J,UAAUuH,OAAO4mB,IAE/BppB,GAAakB,QAAQzB,KAAKoF,SAAUokB,IAEpCxpB,KAAKiqB,oBAAoB,GAUGjqB,KAAKoF,SAAUpF,KAAKqF,QAAQ+a,WAC5D,CAEAxQ,OACO5P,KAAKkqB,YAIQ3pB,GAAakB,QAAQzB,KAAKoF,SAAUikB,IAExCxnB,mBAad7B,KAAKoF,SAAS5J,UAAUtE,IAAIyyB,IAE5B3pB,KAAK2F,gBAXY,KACf3F,KAAKoF,SAAS5J,UAAUtE,IAAIuyB,IAG5BzpB,KAAKoF,SAAS5J,UAAUuH,OAAO4mB,GAAoBD,IAEnDnpB,GAAakB,QAAQzB,KAAKoF,SAAUkkB,GAAa,GAKrBtpB,KAAKoF,SAAUpF,KAAKqF,QAAQ+a,YAC5D,CAEA7a,UACEvF,KAAKgqB,gBAEDhqB,KAAKkqB,WACPlqB,KAAKoF,SAAS5J,UAAUuH,OAAO2mB,IAGjCvkB,MAAMI,SACR,CAEA2kB,UACE,OAAOlqB,KAAKoF,SAAS5J,UAAUvW,SAASykC,GAC1C,CAGAO,qBACOjqB,KAAKqF,QAAQukB,WAId5pB,KAAK8pB,sBAAwB9pB,KAAK+pB,0BAItC/pB,KAAK4gB,SAAW/iB,YAAW,KACzBmC,KAAK4P,MAAM,GACV5P,KAAKqF,QAAQkb,QAClB,CAEA4J,eAAe/qB,EAAOgrB,GACpB,OAAQhrB,EAAMqB,MACZ,IAAK,YACL,IAAK,WAEDT,KAAK8pB,qBAAuBM,EAC5B,MAGJ,IAAK,UACL,IAAK,WAEDpqB,KAAK+pB,wBAA0BK,EAKrC,GAAIA,EAGF,YAFApqB,KAAKgqB,gBAKP,MAAMxc,EAAcpO,EAAMU,cAEtBE,KAAKoF,WAAaoI,GAAexN,KAAKoF,SAASngB,SAASuoB,IAI5DxN,KAAKiqB,oBACP,CAEA/I,gBACE3gB,GAAaY,GAAGnB,KAAKoF,SAAU6jB,IAAiB7pB,GAASY,KAAKmqB,eAAe/qB,GAAO,KACpFmB,GAAaY,GAAGnB,KAAKoF,SAAU8jB,IAAgB9pB,GAASY,KAAKmqB,eAAe/qB,GAAO,KACnFmB,GAAaY,GAAGnB,KAAKoF,SAAU+jB,IAAe/pB,GAASY,KAAKmqB,eAAe/qB,GAAO,KAClFmB,GAAaY,GAAGnB,KAAKoF,SAAUgkB,IAAgBhqB,GAASY,KAAKmqB,eAAe/qB,GAAO,IACrF,CAEA4qB,gBACE/c,aAAajN,KAAK4gB,UAClB5gB,KAAK4gB,SAAW,IAClB,CAGA/a,uB
AAuBxB,GACrB,OAAOrE,KAAK4G,MAAK,WACf,MAAM9b,EAAO++B,GAAMxjB,oBAAoBrG,KAAMqE,GAE7C,GAAsB,iBAAXA,EAAqB,CAC9B,QAA4B,IAAjBvZ,EAAKuZ,GACd,MAAM,IAAIW,UAAU,oBAAoBX,MAG1CvZ,EAAKuZ,GAAQrE,KACf,CACF,GACF,ECxjKK,IAAuBzD,GDgkK9ByJ,GAAqB6jB,IAKrBxtB,GAAmBwtB,ICrkKWttB,GCK9B,WAC2B,GAAG1J,MAAM5U,KAChC6H,SAASsa,iBAAiB,+BAET7c,KAAI,SAAU8mC,GAC/B,OAAO,IAAI3J,GAAQ2J,EAAkB,CAAE9J,MAAO,CAAE1Q,KAAM,IAAKD,KAAM,MACnE,GACF,EDX6B,WAAvB9pB,SAASgX,WAAyBP,KACjCzW,SAASyF,iBAAiB,mBAAoBgR","sources":["webpack://pydata_sphinx_theme/webpack/bootstrap","webpack://pydata_sphinx_theme/webpack/runtime/define property getters","webpack://pydata_sphinx_theme/webpack/runtime/hasOwnProperty shorthand","webpack://pydata_sphinx_theme/webpack/runtime/make namespace object","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/enums.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/instanceOf.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/applyStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getBasePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/math.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/userAgent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isLayoutViewport.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getBoundingClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getLayoutRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/contains.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getComputedStyle.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isTableElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getParentNode.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getOffsetParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getMainAxisFromPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/within.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergePaddingObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getFreshSideObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/expandToHashMap.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/arrow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getVariation.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/computeStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/eventListeners.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositeVariationPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScrollBarX.js","webpack://pydata_sphinx_theme/./node_modules/@popper
js/core/lib/dom-utils/isScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/listScrollParents.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/rectToClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getClippingRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getViewportRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/detectOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/flip.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeAutoPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/hide.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/offset.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/popperOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/preventOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getAltAxis.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getCompositeRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getHTMLElementScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/orderModifiers.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/createPopper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/debounce.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergeByName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper-lite.js","webpack://pydata_sphinx_theme/./node_modules/bootstrap/dist/js/bootstrap.esm.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/mixin.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/bootstrap.js"],"sourcesContent":["// The require scope\nvar __webpack_require__ = {};\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","export var top = 'top';\nexport var bottom = 'bottom';\nexport var right = 'right';\nexport var left = 'left';\nexport var auto = 'auto';\nexport var basePlacements = [top, bottom, right, left];\nexport var start = 'start';\nexport var end = 'end';\nexport var clippingParents = 'clippingParents';\nexport var 
viewport = 'viewport';\nexport var popper = 'popper';\nexport var reference = 'reference';\nexport var variationPlacements = /*#__PURE__*/basePlacements.reduce(function (acc, placement) {\n return acc.concat([placement + \"-\" + start, placement + \"-\" + end]);\n}, []);\nexport var placements = /*#__PURE__*/[].concat(basePlacements, [auto]).reduce(function (acc, placement) {\n return acc.concat([placement, placement + \"-\" + start, placement + \"-\" + end]);\n}, []); // modifiers that need to read the DOM\n\nexport var beforeRead = 'beforeRead';\nexport var read = 'read';\nexport var afterRead = 'afterRead'; // pure-logic modifiers\n\nexport var beforeMain = 'beforeMain';\nexport var main = 'main';\nexport var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)\n\nexport var beforeWrite = 'beforeWrite';\nexport var write = 'write';\nexport var afterWrite = 'afterWrite';\nexport var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];","export default function getNodeName(element) {\n return element ? (element.nodeName || '').toLowerCase() : null;\n}","export default function getWindow(node) {\n if (node == null) {\n return window;\n }\n\n if (node.toString() !== '[object Window]') {\n var ownerDocument = node.ownerDocument;\n return ownerDocument ? ownerDocument.defaultView || window : window;\n }\n\n return node;\n}","import getWindow from \"./getWindow.js\";\n\nfunction isElement(node) {\n var OwnElement = getWindow(node).Element;\n return node instanceof OwnElement || node instanceof Element;\n}\n\nfunction isHTMLElement(node) {\n var OwnElement = getWindow(node).HTMLElement;\n return node instanceof OwnElement || node instanceof HTMLElement;\n}\n\nfunction isShadowRoot(node) {\n // IE 11 has no ShadowRoot\n if (typeof ShadowRoot === 'undefined') {\n return false;\n }\n\n var OwnElement = getWindow(node).ShadowRoot;\n return node instanceof OwnElement || node instanceof ShadowRoot;\n}\n\nexport { isElement, isHTMLElement, isShadowRoot };","import getNodeName from \"../dom-utils/getNodeName.js\";\nimport { isHTMLElement } from \"../dom-utils/instanceOf.js\"; // This modifier takes the styles prepared by the `computeStyles` modifier\n// and applies them to the HTMLElements such as popper and arrow\n\nfunction applyStyles(_ref) {\n var state = _ref.state;\n Object.keys(state.elements).forEach(function (name) {\n var style = state.styles[name] || {};\n var attributes = state.attributes[name] || {};\n var element = state.elements[name]; // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n } // Flow doesn't support to extend this property, but it's the most\n // effective way to apply styles to an HTMLElement\n // $FlowFixMe[cannot-write]\n\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (name) {\n var value = attributes[name];\n\n if (value === false) {\n element.removeAttribute(name);\n } else {\n element.setAttribute(name, value === true ? 
'' : value);\n }\n });\n });\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state;\n var initialStyles = {\n popper: {\n position: state.options.strategy,\n left: '0',\n top: '0',\n margin: '0'\n },\n arrow: {\n position: 'absolute'\n },\n reference: {}\n };\n Object.assign(state.elements.popper.style, initialStyles.popper);\n state.styles = initialStyles;\n\n if (state.elements.arrow) {\n Object.assign(state.elements.arrow.style, initialStyles.arrow);\n }\n\n return function () {\n Object.keys(state.elements).forEach(function (name) {\n var element = state.elements[name];\n var attributes = state.attributes[name] || {};\n var styleProperties = Object.keys(state.styles.hasOwnProperty(name) ? state.styles[name] : initialStyles[name]); // Set all values to an empty string to unset them\n\n var style = styleProperties.reduce(function (style, property) {\n style[property] = '';\n return style;\n }, {}); // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n }\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (attribute) {\n element.removeAttribute(attribute);\n });\n });\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'applyStyles',\n enabled: true,\n phase: 'write',\n fn: applyStyles,\n effect: effect,\n requires: ['computeStyles']\n};","import { auto } from \"../enums.js\";\nexport default function getBasePlacement(placement) {\n return placement.split('-')[0];\n}","export var max = Math.max;\nexport var min = Math.min;\nexport var round = Math.round;","export default function getUAString() {\n var uaData = navigator.userAgentData;\n\n if (uaData != null && uaData.brands && Array.isArray(uaData.brands)) {\n return uaData.brands.map(function (item) {\n return item.brand + \"/\" + item.version;\n }).join(' ');\n }\n\n return navigator.userAgent;\n}","import getUAString from \"../utils/userAgent.js\";\nexport default function isLayoutViewport() {\n return !/^((?!chrome|android).)*safari/i.test(getUAString());\n}","import { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport { round } from \"../utils/math.js\";\nimport getWindow from \"./getWindow.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getBoundingClientRect(element, includeScale, isFixedStrategy) {\n if (includeScale === void 0) {\n includeScale = false;\n }\n\n if (isFixedStrategy === void 0) {\n isFixedStrategy = false;\n }\n\n var clientRect = element.getBoundingClientRect();\n var scaleX = 1;\n var scaleY = 1;\n\n if (includeScale && isHTMLElement(element)) {\n scaleX = element.offsetWidth > 0 ? round(clientRect.width) / element.offsetWidth || 1 : 1;\n scaleY = element.offsetHeight > 0 ? round(clientRect.height) / element.offsetHeight || 1 : 1;\n }\n\n var _ref = isElement(element) ? getWindow(element) : window,\n visualViewport = _ref.visualViewport;\n\n var addVisualOffsets = !isLayoutViewport() && isFixedStrategy;\n var x = (clientRect.left + (addVisualOffsets && visualViewport ? visualViewport.offsetLeft : 0)) / scaleX;\n var y = (clientRect.top + (addVisualOffsets && visualViewport ? 
visualViewport.offsetTop : 0)) / scaleY;\n var width = clientRect.width / scaleX;\n var height = clientRect.height / scaleY;\n return {\n width: width,\n height: height,\n top: y,\n right: x + width,\n bottom: y + height,\n left: x,\n x: x,\n y: y\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\"; // Returns the layout rect of an element relative to its offsetParent. Layout\n// means it doesn't take into account transforms.\n\nexport default function getLayoutRect(element) {\n var clientRect = getBoundingClientRect(element); // Use the clientRect sizes if it's not been transformed.\n // Fixes https://github.com/popperjs/popper-core/issues/1223\n\n var width = element.offsetWidth;\n var height = element.offsetHeight;\n\n if (Math.abs(clientRect.width - width) <= 1) {\n width = clientRect.width;\n }\n\n if (Math.abs(clientRect.height - height) <= 1) {\n height = clientRect.height;\n }\n\n return {\n x: element.offsetLeft,\n y: element.offsetTop,\n width: width,\n height: height\n };\n}","import { isShadowRoot } from \"./instanceOf.js\";\nexport default function contains(parent, child) {\n var rootNode = child.getRootNode && child.getRootNode(); // First, attempt with faster native method\n\n if (parent.contains(child)) {\n return true;\n } // then fallback to custom implementation with Shadow DOM support\n else if (rootNode && isShadowRoot(rootNode)) {\n var next = child;\n\n do {\n if (next && parent.isSameNode(next)) {\n return true;\n } // $FlowFixMe[prop-missing]: need a better way to handle this...\n\n\n next = next.parentNode || next.host;\n } while (next);\n } // Give up, the result is false\n\n\n return false;\n}","import getWindow from \"./getWindow.js\";\nexport default function getComputedStyle(element) {\n return getWindow(element).getComputedStyle(element);\n}","import getNodeName from \"./getNodeName.js\";\nexport default function isTableElement(element) {\n return ['table', 'td', 'th'].indexOf(getNodeName(element)) >= 0;\n}","import { isElement } from \"./instanceOf.js\";\nexport default function getDocumentElement(element) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return ((isElement(element) ? element.ownerDocument : // $FlowFixMe[prop-missing]\n element.document) || window.document).documentElement;\n}","import getNodeName from \"./getNodeName.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport { isShadowRoot } from \"./instanceOf.js\";\nexport default function getParentNode(element) {\n if (getNodeName(element) === 'html') {\n return element;\n }\n\n return (// this is a quicker (but less type safe) way to save quite some bytes from the bundle\n // $FlowFixMe[incompatible-return]\n // $FlowFixMe[prop-missing]\n element.assignedSlot || // step into the shadow DOM of the parent of a slotted node\n element.parentNode || ( // DOM Element detected\n isShadowRoot(element) ? 
element.host : null) || // ShadowRoot detected\n // $FlowFixMe[incompatible-call]: HTMLElement is a Node\n getDocumentElement(element) // fallback\n\n );\n}","import getWindow from \"./getWindow.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isHTMLElement, isShadowRoot } from \"./instanceOf.js\";\nimport isTableElement from \"./isTableElement.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getUAString from \"../utils/userAgent.js\";\n\nfunction getTrueOffsetParent(element) {\n if (!isHTMLElement(element) || // https://github.com/popperjs/popper-core/issues/837\n getComputedStyle(element).position === 'fixed') {\n return null;\n }\n\n return element.offsetParent;\n} // `.offsetParent` reports `null` for fixed elements, while absolute elements\n// return the containing block\n\n\nfunction getContainingBlock(element) {\n var isFirefox = /firefox/i.test(getUAString());\n var isIE = /Trident/i.test(getUAString());\n\n if (isIE && isHTMLElement(element)) {\n // In IE 9, 10 and 11 fixed elements containing block is always established by the viewport\n var elementCss = getComputedStyle(element);\n\n if (elementCss.position === 'fixed') {\n return null;\n }\n }\n\n var currentNode = getParentNode(element);\n\n if (isShadowRoot(currentNode)) {\n currentNode = currentNode.host;\n }\n\n while (isHTMLElement(currentNode) && ['html', 'body'].indexOf(getNodeName(currentNode)) < 0) {\n var css = getComputedStyle(currentNode); // This is non-exhaustive but covers the most common CSS properties that\n // create a containing block.\n // https://developer.mozilla.org/en-US/docs/Web/CSS/Containing_block#identifying_the_containing_block\n\n if (css.transform !== 'none' || css.perspective !== 'none' || css.contain === 'paint' || ['transform', 'perspective'].indexOf(css.willChange) !== -1 || isFirefox && css.willChange === 'filter' || isFirefox && css.filter && css.filter !== 'none') {\n return currentNode;\n } else {\n currentNode = currentNode.parentNode;\n }\n }\n\n return null;\n} // Gets the closest ancestor positioned element. Handles some edge cases,\n// such as table ancestors and cross browser bugs.\n\n\nexport default function getOffsetParent(element) {\n var window = getWindow(element);\n var offsetParent = getTrueOffsetParent(element);\n\n while (offsetParent && isTableElement(offsetParent) && getComputedStyle(offsetParent).position === 'static') {\n offsetParent = getTrueOffsetParent(offsetParent);\n }\n\n if (offsetParent && (getNodeName(offsetParent) === 'html' || getNodeName(offsetParent) === 'body' && getComputedStyle(offsetParent).position === 'static')) {\n return window;\n }\n\n return offsetParent || getContainingBlock(element) || window;\n}","export default function getMainAxisFromPlacement(placement) {\n return ['top', 'bottom'].indexOf(placement) >= 0 ? 'x' : 'y';\n}","import { max as mathMax, min as mathMin } from \"./math.js\";\nexport function within(min, value, max) {\n return mathMax(min, mathMin(value, max));\n}\nexport function withinMaxClamp(min, value, max) {\n var v = within(min, value, max);\n return v > max ? 
max : v;\n}","import getFreshSideObject from \"./getFreshSideObject.js\";\nexport default function mergePaddingObject(paddingObject) {\n return Object.assign({}, getFreshSideObject(), paddingObject);\n}","export default function getFreshSideObject() {\n return {\n top: 0,\n right: 0,\n bottom: 0,\n left: 0\n };\n}","export default function expandToHashMap(value, keys) {\n return keys.reduce(function (hashMap, key) {\n hashMap[key] = value;\n return hashMap;\n }, {});\n}","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport contains from \"../dom-utils/contains.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport { within } from \"../utils/within.js\";\nimport mergePaddingObject from \"../utils/mergePaddingObject.js\";\nimport expandToHashMap from \"../utils/expandToHashMap.js\";\nimport { left, right, basePlacements, top, bottom } from \"../enums.js\";\nimport { isHTMLElement } from \"../dom-utils/instanceOf.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar toPaddingObject = function toPaddingObject(padding, state) {\n padding = typeof padding === 'function' ? padding(Object.assign({}, state.rects, {\n placement: state.placement\n })) : padding;\n return mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n};\n\nfunction arrow(_ref) {\n var _state$modifiersData$;\n\n var state = _ref.state,\n name = _ref.name,\n options = _ref.options;\n var arrowElement = state.elements.arrow;\n var popperOffsets = state.modifiersData.popperOffsets;\n var basePlacement = getBasePlacement(state.placement);\n var axis = getMainAxisFromPlacement(basePlacement);\n var isVertical = [left, right].indexOf(basePlacement) >= 0;\n var len = isVertical ? 'height' : 'width';\n\n if (!arrowElement || !popperOffsets) {\n return;\n }\n\n var paddingObject = toPaddingObject(options.padding, state);\n var arrowRect = getLayoutRect(arrowElement);\n var minProp = axis === 'y' ? top : left;\n var maxProp = axis === 'y' ? bottom : right;\n var endDiff = state.rects.reference[len] + state.rects.reference[axis] - popperOffsets[axis] - state.rects.popper[len];\n var startDiff = popperOffsets[axis] - state.rects.reference[axis];\n var arrowOffsetParent = getOffsetParent(arrowElement);\n var clientSize = arrowOffsetParent ? axis === 'y' ? arrowOffsetParent.clientHeight || 0 : arrowOffsetParent.clientWidth || 0 : 0;\n var centerToReference = endDiff / 2 - startDiff / 2; // Make sure the arrow doesn't overflow the popper if the center point is\n // outside of the popper bounds\n\n var min = paddingObject[minProp];\n var max = clientSize - arrowRect[len] - paddingObject[maxProp];\n var center = clientSize / 2 - arrowRect[len] / 2 + centerToReference;\n var offset = within(min, center, max); // Prevents breaking syntax highlighting...\n\n var axisProp = axis;\n state.modifiersData[name] = (_state$modifiersData$ = {}, _state$modifiersData$[axisProp] = offset, _state$modifiersData$.centerOffset = offset - center, _state$modifiersData$);\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state,\n options = _ref2.options;\n var _options$element = options.element,\n arrowElement = _options$element === void 0 ? 
'[data-popper-arrow]' : _options$element;\n\n if (arrowElement == null) {\n return;\n } // CSS selector\n\n\n if (typeof arrowElement === 'string') {\n arrowElement = state.elements.popper.querySelector(arrowElement);\n\n if (!arrowElement) {\n return;\n }\n }\n\n if (process.env.NODE_ENV !== \"production\") {\n if (!isHTMLElement(arrowElement)) {\n console.error(['Popper: \"arrow\" element must be an HTMLElement (not an SVGElement).', 'To use an SVG arrow, wrap it in an HTMLElement that will be used as', 'the arrow.'].join(' '));\n }\n }\n\n if (!contains(state.elements.popper, arrowElement)) {\n if (process.env.NODE_ENV !== \"production\") {\n console.error(['Popper: \"arrow\" modifier\\'s `element` must be a child of the popper', 'element.'].join(' '));\n }\n\n return;\n }\n\n state.elements.arrow = arrowElement;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'arrow',\n enabled: true,\n phase: 'main',\n fn: arrow,\n effect: effect,\n requires: ['popperOffsets'],\n requiresIfExists: ['preventOverflow']\n};","export default function getVariation(placement) {\n return placement.split('-')[1];\n}","import { top, left, right, bottom, end } from \"../enums.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getWindow from \"../dom-utils/getWindow.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getComputedStyle from \"../dom-utils/getComputedStyle.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport { round } from \"../utils/math.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar unsetSides = {\n top: 'auto',\n right: 'auto',\n bottom: 'auto',\n left: 'auto'\n}; // Round the offsets to the nearest suitable subpixel based on the DPR.\n// Zooming can change the DPR, but it seems to report a value that will\n// cleanly divide the values into the appropriate subpixels.\n\nfunction roundOffsetsByDPR(_ref, win) {\n var x = _ref.x,\n y = _ref.y;\n var dpr = win.devicePixelRatio || 1;\n return {\n x: round(x * dpr) / dpr || 0,\n y: round(y * dpr) / dpr || 0\n };\n}\n\nexport function mapToStyles(_ref2) {\n var _Object$assign2;\n\n var popper = _ref2.popper,\n popperRect = _ref2.popperRect,\n placement = _ref2.placement,\n variation = _ref2.variation,\n offsets = _ref2.offsets,\n position = _ref2.position,\n gpuAcceleration = _ref2.gpuAcceleration,\n adaptive = _ref2.adaptive,\n roundOffsets = _ref2.roundOffsets,\n isFixed = _ref2.isFixed;\n var _offsets$x = offsets.x,\n x = _offsets$x === void 0 ? 0 : _offsets$x,\n _offsets$y = offsets.y,\n y = _offsets$y === void 0 ? 0 : _offsets$y;\n\n var _ref3 = typeof roundOffsets === 'function' ? 
roundOffsets({\n x: x,\n y: y\n }) : {\n x: x,\n y: y\n };\n\n x = _ref3.x;\n y = _ref3.y;\n var hasX = offsets.hasOwnProperty('x');\n var hasY = offsets.hasOwnProperty('y');\n var sideX = left;\n var sideY = top;\n var win = window;\n\n if (adaptive) {\n var offsetParent = getOffsetParent(popper);\n var heightProp = 'clientHeight';\n var widthProp = 'clientWidth';\n\n if (offsetParent === getWindow(popper)) {\n offsetParent = getDocumentElement(popper);\n\n if (getComputedStyle(offsetParent).position !== 'static' && position === 'absolute') {\n heightProp = 'scrollHeight';\n widthProp = 'scrollWidth';\n }\n } // $FlowFixMe[incompatible-cast]: force type refinement, we compare offsetParent with window above, but Flow doesn't detect it\n\n\n offsetParent = offsetParent;\n\n if (placement === top || (placement === left || placement === right) && variation === end) {\n sideY = bottom;\n var offsetY = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.height : // $FlowFixMe[prop-missing]\n offsetParent[heightProp];\n y -= offsetY - popperRect.height;\n y *= gpuAcceleration ? 1 : -1;\n }\n\n if (placement === left || (placement === top || placement === bottom) && variation === end) {\n sideX = right;\n var offsetX = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.width : // $FlowFixMe[prop-missing]\n offsetParent[widthProp];\n x -= offsetX - popperRect.width;\n x *= gpuAcceleration ? 1 : -1;\n }\n }\n\n var commonStyles = Object.assign({\n position: position\n }, adaptive && unsetSides);\n\n var _ref4 = roundOffsets === true ? roundOffsetsByDPR({\n x: x,\n y: y\n }, getWindow(popper)) : {\n x: x,\n y: y\n };\n\n x = _ref4.x;\n y = _ref4.y;\n\n if (gpuAcceleration) {\n var _Object$assign;\n\n return Object.assign({}, commonStyles, (_Object$assign = {}, _Object$assign[sideY] = hasY ? '0' : '', _Object$assign[sideX] = hasX ? '0' : '', _Object$assign.transform = (win.devicePixelRatio || 1) <= 1 ? \"translate(\" + x + \"px, \" + y + \"px)\" : \"translate3d(\" + x + \"px, \" + y + \"px, 0)\", _Object$assign));\n }\n\n return Object.assign({}, commonStyles, (_Object$assign2 = {}, _Object$assign2[sideY] = hasY ? y + \"px\" : '', _Object$assign2[sideX] = hasX ? x + \"px\" : '', _Object$assign2.transform = '', _Object$assign2));\n}\n\nfunction computeStyles(_ref5) {\n var state = _ref5.state,\n options = _ref5.options;\n var _options$gpuAccelerat = options.gpuAcceleration,\n gpuAcceleration = _options$gpuAccelerat === void 0 ? true : _options$gpuAccelerat,\n _options$adaptive = options.adaptive,\n adaptive = _options$adaptive === void 0 ? true : _options$adaptive,\n _options$roundOffsets = options.roundOffsets,\n roundOffsets = _options$roundOffsets === void 0 ? 
true : _options$roundOffsets;\n\n if (process.env.NODE_ENV !== \"production\") {\n var transitionProperty = getComputedStyle(state.elements.popper).transitionProperty || '';\n\n if (adaptive && ['transform', 'top', 'right', 'bottom', 'left'].some(function (property) {\n return transitionProperty.indexOf(property) >= 0;\n })) {\n console.warn(['Popper: Detected CSS transitions on at least one of the following', 'CSS properties: \"transform\", \"top\", \"right\", \"bottom\", \"left\".', '\\n\\n', 'Disable the \"computeStyles\" modifier\\'s `adaptive` option to allow', 'for smooth transitions, or remove these properties from the CSS', 'transition declaration on the popper element if only transitioning', 'opacity or background-color for example.', '\\n\\n', 'We recommend using the popper element as a wrapper around an inner', 'element that can have any CSS property transitioned for animations.'].join(' '));\n }\n }\n\n var commonStyles = {\n placement: getBasePlacement(state.placement),\n variation: getVariation(state.placement),\n popper: state.elements.popper,\n popperRect: state.rects.popper,\n gpuAcceleration: gpuAcceleration,\n isFixed: state.options.strategy === 'fixed'\n };\n\n if (state.modifiersData.popperOffsets != null) {\n state.styles.popper = Object.assign({}, state.styles.popper, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.popperOffsets,\n position: state.options.strategy,\n adaptive: adaptive,\n roundOffsets: roundOffsets\n })));\n }\n\n if (state.modifiersData.arrow != null) {\n state.styles.arrow = Object.assign({}, state.styles.arrow, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.arrow,\n position: 'absolute',\n adaptive: false,\n roundOffsets: roundOffsets\n })));\n }\n\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-placement': state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'computeStyles',\n enabled: true,\n phase: 'beforeWrite',\n fn: computeStyles,\n data: {}\n};","import getWindow from \"../dom-utils/getWindow.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar passive = {\n passive: true\n};\n\nfunction effect(_ref) {\n var state = _ref.state,\n instance = _ref.instance,\n options = _ref.options;\n var _options$scroll = options.scroll,\n scroll = _options$scroll === void 0 ? true : _options$scroll,\n _options$resize = options.resize,\n resize = _options$resize === void 0 ? 
true : _options$resize;\n var window = getWindow(state.elements.popper);\n var scrollParents = [].concat(state.scrollParents.reference, state.scrollParents.popper);\n\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.addEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.addEventListener('resize', instance.update, passive);\n }\n\n return function () {\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.removeEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.removeEventListener('resize', instance.update, passive);\n }\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'eventListeners',\n enabled: true,\n phase: 'write',\n fn: function fn() {},\n effect: effect,\n data: {}\n};","var hash = {\n left: 'right',\n right: 'left',\n bottom: 'top',\n top: 'bottom'\n};\nexport default function getOppositePlacement(placement) {\n return placement.replace(/left|right|bottom|top/g, function (matched) {\n return hash[matched];\n });\n}","var hash = {\n start: 'end',\n end: 'start'\n};\nexport default function getOppositeVariationPlacement(placement) {\n return placement.replace(/start|end/g, function (matched) {\n return hash[matched];\n });\n}","import getWindow from \"./getWindow.js\";\nexport default function getWindowScroll(node) {\n var win = getWindow(node);\n var scrollLeft = win.pageXOffset;\n var scrollTop = win.pageYOffset;\n return {\n scrollLeft: scrollLeft,\n scrollTop: scrollTop\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nexport default function getWindowScrollBarX(element) {\n // If has a CSS width greater than the viewport, then this will be\n // incorrect for RTL.\n // Popper 1 is broken in this case and never had a bug report so let's assume\n // it's not an issue. I don't think anyone ever specifies width on \n // anyway.\n // Browsers where the left scrollbar doesn't cause an issue report `0` for\n // this (e.g. 
Edge 2019, IE11, Safari)\n return getBoundingClientRect(getDocumentElement(element)).left + getWindowScroll(element).scrollLeft;\n}","import getComputedStyle from \"./getComputedStyle.js\";\nexport default function isScrollParent(element) {\n // Firefox wants us to check `-x` and `-y` variations as well\n var _getComputedStyle = getComputedStyle(element),\n overflow = _getComputedStyle.overflow,\n overflowX = _getComputedStyle.overflowX,\n overflowY = _getComputedStyle.overflowY;\n\n return /auto|scroll|overlay|hidden/.test(overflow + overflowY + overflowX);\n}","import getParentNode from \"./getParentNode.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nexport default function getScrollParent(node) {\n if (['html', 'body', '#document'].indexOf(getNodeName(node)) >= 0) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return node.ownerDocument.body;\n }\n\n if (isHTMLElement(node) && isScrollParent(node)) {\n return node;\n }\n\n return getScrollParent(getParentNode(node));\n}","import getScrollParent from \"./getScrollParent.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getWindow from \"./getWindow.js\";\nimport isScrollParent from \"./isScrollParent.js\";\n/*\ngiven a DOM element, return the list of all scroll parents, up the list of ancesors\nuntil we get to the top window object. This list is what we attach scroll listeners\nto, because if any of these parent elements scroll, we'll need to re-calculate the\nreference element's position.\n*/\n\nexport default function listScrollParents(element, list) {\n var _element$ownerDocumen;\n\n if (list === void 0) {\n list = [];\n }\n\n var scrollParent = getScrollParent(element);\n var isBody = scrollParent === ((_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body);\n var win = getWindow(scrollParent);\n var target = isBody ? [win].concat(win.visualViewport || [], isScrollParent(scrollParent) ? scrollParent : []) : scrollParent;\n var updatedList = list.concat(target);\n return isBody ? 
updatedList : // $FlowFixMe[incompatible-call]: isBody tells us target will be an HTMLElement here\n updatedList.concat(listScrollParents(getParentNode(target)));\n}","export default function rectToClientRect(rect) {\n return Object.assign({}, rect, {\n left: rect.x,\n top: rect.y,\n right: rect.x + rect.width,\n bottom: rect.y + rect.height\n });\n}","import { viewport } from \"../enums.js\";\nimport getViewportRect from \"./getViewportRect.js\";\nimport getDocumentRect from \"./getDocumentRect.js\";\nimport listScrollParents from \"./listScrollParents.js\";\nimport getOffsetParent from \"./getOffsetParent.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport contains from \"./contains.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport rectToClientRect from \"../utils/rectToClientRect.js\";\nimport { max, min } from \"../utils/math.js\";\n\nfunction getInnerBoundingClientRect(element, strategy) {\n var rect = getBoundingClientRect(element, false, strategy === 'fixed');\n rect.top = rect.top + element.clientTop;\n rect.left = rect.left + element.clientLeft;\n rect.bottom = rect.top + element.clientHeight;\n rect.right = rect.left + element.clientWidth;\n rect.width = element.clientWidth;\n rect.height = element.clientHeight;\n rect.x = rect.left;\n rect.y = rect.top;\n return rect;\n}\n\nfunction getClientRectFromMixedType(element, clippingParent, strategy) {\n return clippingParent === viewport ? rectToClientRect(getViewportRect(element, strategy)) : isElement(clippingParent) ? getInnerBoundingClientRect(clippingParent, strategy) : rectToClientRect(getDocumentRect(getDocumentElement(element)));\n} // A \"clipping parent\" is an overflowable container with the characteristic of\n// clipping (or hiding) overflowing elements with a position different from\n// `initial`\n\n\nfunction getClippingParents(element) {\n var clippingParents = listScrollParents(getParentNode(element));\n var canEscapeClipping = ['absolute', 'fixed'].indexOf(getComputedStyle(element).position) >= 0;\n var clipperElement = canEscapeClipping && isHTMLElement(element) ? getOffsetParent(element) : element;\n\n if (!isElement(clipperElement)) {\n return [];\n } // $FlowFixMe[incompatible-return]: https://github.com/facebook/flow/issues/1414\n\n\n return clippingParents.filter(function (clippingParent) {\n return isElement(clippingParent) && contains(clippingParent, clipperElement) && getNodeName(clippingParent) !== 'body';\n });\n} // Gets the maximum area that the element is visible in due to any number of\n// clipping parents\n\n\nexport default function getClippingRect(element, boundary, rootBoundary, strategy) {\n var mainClippingParents = boundary === 'clippingParents' ? 
getClippingParents(element) : [].concat(boundary);\n var clippingParents = [].concat(mainClippingParents, [rootBoundary]);\n var firstClippingParent = clippingParents[0];\n var clippingRect = clippingParents.reduce(function (accRect, clippingParent) {\n var rect = getClientRectFromMixedType(element, clippingParent, strategy);\n accRect.top = max(rect.top, accRect.top);\n accRect.right = min(rect.right, accRect.right);\n accRect.bottom = min(rect.bottom, accRect.bottom);\n accRect.left = max(rect.left, accRect.left);\n return accRect;\n }, getClientRectFromMixedType(element, firstClippingParent, strategy));\n clippingRect.width = clippingRect.right - clippingRect.left;\n clippingRect.height = clippingRect.bottom - clippingRect.top;\n clippingRect.x = clippingRect.left;\n clippingRect.y = clippingRect.top;\n return clippingRect;\n}","import getWindow from \"./getWindow.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getViewportRect(element, strategy) {\n var win = getWindow(element);\n var html = getDocumentElement(element);\n var visualViewport = win.visualViewport;\n var width = html.clientWidth;\n var height = html.clientHeight;\n var x = 0;\n var y = 0;\n\n if (visualViewport) {\n width = visualViewport.width;\n height = visualViewport.height;\n var layoutViewport = isLayoutViewport();\n\n if (layoutViewport || !layoutViewport && strategy === 'fixed') {\n x = visualViewport.offsetLeft;\n y = visualViewport.offsetTop;\n }\n }\n\n return {\n width: width,\n height: height,\n x: x + getWindowScrollBarX(element),\n y: y\n };\n}","import getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nimport { max } from \"../utils/math.js\"; // Gets the entire size of the scrollable document area, even extending outside\n// of the `` and `` rect bounds if horizontally scrollable\n\nexport default function getDocumentRect(element) {\n var _element$ownerDocumen;\n\n var html = getDocumentElement(element);\n var winScroll = getWindowScroll(element);\n var body = (_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body;\n var width = max(html.scrollWidth, html.clientWidth, body ? body.scrollWidth : 0, body ? body.clientWidth : 0);\n var height = max(html.scrollHeight, html.clientHeight, body ? body.scrollHeight : 0, body ? body.clientHeight : 0);\n var x = -winScroll.scrollLeft + getWindowScrollBarX(element);\n var y = -winScroll.scrollTop;\n\n if (getComputedStyle(body || html).direction === 'rtl') {\n x += max(html.clientWidth, body ? body.clientWidth : 0) - width;\n }\n\n return {\n width: width,\n height: height,\n x: x,\n y: y\n };\n}","import getBasePlacement from \"./getBasePlacement.js\";\nimport getVariation from \"./getVariation.js\";\nimport getMainAxisFromPlacement from \"./getMainAxisFromPlacement.js\";\nimport { top, right, bottom, left, start, end } from \"../enums.js\";\nexport default function computeOffsets(_ref) {\n var reference = _ref.reference,\n element = _ref.element,\n placement = _ref.placement;\n var basePlacement = placement ? getBasePlacement(placement) : null;\n var variation = placement ? 
getVariation(placement) : null;\n var commonX = reference.x + reference.width / 2 - element.width / 2;\n var commonY = reference.y + reference.height / 2 - element.height / 2;\n var offsets;\n\n switch (basePlacement) {\n case top:\n offsets = {\n x: commonX,\n y: reference.y - element.height\n };\n break;\n\n case bottom:\n offsets = {\n x: commonX,\n y: reference.y + reference.height\n };\n break;\n\n case right:\n offsets = {\n x: reference.x + reference.width,\n y: commonY\n };\n break;\n\n case left:\n offsets = {\n x: reference.x - element.width,\n y: commonY\n };\n break;\n\n default:\n offsets = {\n x: reference.x,\n y: reference.y\n };\n }\n\n var mainAxis = basePlacement ? getMainAxisFromPlacement(basePlacement) : null;\n\n if (mainAxis != null) {\n var len = mainAxis === 'y' ? 'height' : 'width';\n\n switch (variation) {\n case start:\n offsets[mainAxis] = offsets[mainAxis] - (reference[len] / 2 - element[len] / 2);\n break;\n\n case end:\n offsets[mainAxis] = offsets[mainAxis] + (reference[len] / 2 - element[len] / 2);\n break;\n\n default:\n }\n }\n\n return offsets;\n}","import getClippingRect from \"../dom-utils/getClippingRect.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getBoundingClientRect from \"../dom-utils/getBoundingClientRect.js\";\nimport computeOffsets from \"./computeOffsets.js\";\nimport rectToClientRect from \"./rectToClientRect.js\";\nimport { clippingParents, reference, popper, bottom, top, right, basePlacements, viewport } from \"../enums.js\";\nimport { isElement } from \"../dom-utils/instanceOf.js\";\nimport mergePaddingObject from \"./mergePaddingObject.js\";\nimport expandToHashMap from \"./expandToHashMap.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport default function detectOverflow(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n _options$placement = _options.placement,\n placement = _options$placement === void 0 ? state.placement : _options$placement,\n _options$strategy = _options.strategy,\n strategy = _options$strategy === void 0 ? state.strategy : _options$strategy,\n _options$boundary = _options.boundary,\n boundary = _options$boundary === void 0 ? clippingParents : _options$boundary,\n _options$rootBoundary = _options.rootBoundary,\n rootBoundary = _options$rootBoundary === void 0 ? viewport : _options$rootBoundary,\n _options$elementConte = _options.elementContext,\n elementContext = _options$elementConte === void 0 ? popper : _options$elementConte,\n _options$altBoundary = _options.altBoundary,\n altBoundary = _options$altBoundary === void 0 ? false : _options$altBoundary,\n _options$padding = _options.padding,\n padding = _options$padding === void 0 ? 0 : _options$padding;\n var paddingObject = mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n var altContext = elementContext === popper ? reference : popper;\n var popperRect = state.rects.popper;\n var element = state.elements[altBoundary ? altContext : elementContext];\n var clippingClientRect = getClippingRect(isElement(element) ? 
element : element.contextElement || getDocumentElement(state.elements.popper), boundary, rootBoundary, strategy);\n var referenceClientRect = getBoundingClientRect(state.elements.reference);\n var popperOffsets = computeOffsets({\n reference: referenceClientRect,\n element: popperRect,\n strategy: 'absolute',\n placement: placement\n });\n var popperClientRect = rectToClientRect(Object.assign({}, popperRect, popperOffsets));\n var elementClientRect = elementContext === popper ? popperClientRect : referenceClientRect; // positive = overflowing the clipping rect\n // 0 or negative = within the clipping rect\n\n var overflowOffsets = {\n top: clippingClientRect.top - elementClientRect.top + paddingObject.top,\n bottom: elementClientRect.bottom - clippingClientRect.bottom + paddingObject.bottom,\n left: clippingClientRect.left - elementClientRect.left + paddingObject.left,\n right: elementClientRect.right - clippingClientRect.right + paddingObject.right\n };\n var offsetData = state.modifiersData.offset; // Offsets can be applied only to the popper element\n\n if (elementContext === popper && offsetData) {\n var offset = offsetData[placement];\n Object.keys(overflowOffsets).forEach(function (key) {\n var multiply = [right, bottom].indexOf(key) >= 0 ? 1 : -1;\n var axis = [top, bottom].indexOf(key) >= 0 ? 'y' : 'x';\n overflowOffsets[key] += offset[axis] * multiply;\n });\n }\n\n return overflowOffsets;\n}","import getOppositePlacement from \"../utils/getOppositePlacement.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getOppositeVariationPlacement from \"../utils/getOppositeVariationPlacement.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport computeAutoPlacement from \"../utils/computeAutoPlacement.js\";\nimport { bottom, top, start, right, left, auto } from \"../enums.js\";\nimport getVariation from \"../utils/getVariation.js\"; // eslint-disable-next-line import/no-unused-modules\n\nfunction getExpandedFallbackPlacements(placement) {\n if (getBasePlacement(placement) === auto) {\n return [];\n }\n\n var oppositePlacement = getOppositePlacement(placement);\n return [getOppositeVariationPlacement(placement), oppositePlacement, getOppositeVariationPlacement(oppositePlacement)];\n}\n\nfunction flip(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n\n if (state.modifiersData[name]._skip) {\n return;\n }\n\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? true : _options$altAxis,\n specifiedFallbackPlacements = options.fallbackPlacements,\n padding = options.padding,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n _options$flipVariatio = options.flipVariations,\n flipVariations = _options$flipVariatio === void 0 ? true : _options$flipVariatio,\n allowedAutoPlacements = options.allowedAutoPlacements;\n var preferredPlacement = state.options.placement;\n var basePlacement = getBasePlacement(preferredPlacement);\n var isBasePlacement = basePlacement === preferredPlacement;\n var fallbackPlacements = specifiedFallbackPlacements || (isBasePlacement || !flipVariations ? 
[getOppositePlacement(preferredPlacement)] : getExpandedFallbackPlacements(preferredPlacement));\n var placements = [preferredPlacement].concat(fallbackPlacements).reduce(function (acc, placement) {\n return acc.concat(getBasePlacement(placement) === auto ? computeAutoPlacement(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n flipVariations: flipVariations,\n allowedAutoPlacements: allowedAutoPlacements\n }) : placement);\n }, []);\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var checksMap = new Map();\n var makeFallbackChecks = true;\n var firstFittingPlacement = placements[0];\n\n for (var i = 0; i < placements.length; i++) {\n var placement = placements[i];\n\n var _basePlacement = getBasePlacement(placement);\n\n var isStartVariation = getVariation(placement) === start;\n var isVertical = [top, bottom].indexOf(_basePlacement) >= 0;\n var len = isVertical ? 'width' : 'height';\n var overflow = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n altBoundary: altBoundary,\n padding: padding\n });\n var mainVariationSide = isVertical ? isStartVariation ? right : left : isStartVariation ? bottom : top;\n\n if (referenceRect[len] > popperRect[len]) {\n mainVariationSide = getOppositePlacement(mainVariationSide);\n }\n\n var altVariationSide = getOppositePlacement(mainVariationSide);\n var checks = [];\n\n if (checkMainAxis) {\n checks.push(overflow[_basePlacement] <= 0);\n }\n\n if (checkAltAxis) {\n checks.push(overflow[mainVariationSide] <= 0, overflow[altVariationSide] <= 0);\n }\n\n if (checks.every(function (check) {\n return check;\n })) {\n firstFittingPlacement = placement;\n makeFallbackChecks = false;\n break;\n }\n\n checksMap.set(placement, checks);\n }\n\n if (makeFallbackChecks) {\n // `2` may be desired in some cases – research later\n var numberOfChecks = flipVariations ? 3 : 1;\n\n var _loop = function _loop(_i) {\n var fittingPlacement = placements.find(function (placement) {\n var checks = checksMap.get(placement);\n\n if (checks) {\n return checks.slice(0, _i).every(function (check) {\n return check;\n });\n }\n });\n\n if (fittingPlacement) {\n firstFittingPlacement = fittingPlacement;\n return \"break\";\n }\n };\n\n for (var _i = numberOfChecks; _i > 0; _i--) {\n var _ret = _loop(_i);\n\n if (_ret === \"break\") break;\n }\n }\n\n if (state.placement !== firstFittingPlacement) {\n state.modifiersData[name]._skip = true;\n state.placement = firstFittingPlacement;\n state.reset = true;\n }\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'flip',\n enabled: true,\n phase: 'main',\n fn: flip,\n requiresIfExists: ['offset'],\n data: {\n _skip: false\n }\n};","import getVariation from \"./getVariation.js\";\nimport { variationPlacements, basePlacements, placements as allPlacements } from \"../enums.js\";\nimport detectOverflow from \"./detectOverflow.js\";\nimport getBasePlacement from \"./getBasePlacement.js\";\nexport default function computeAutoPlacement(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n placement = _options.placement,\n boundary = _options.boundary,\n rootBoundary = _options.rootBoundary,\n padding = _options.padding,\n flipVariations = _options.flipVariations,\n _options$allowedAutoP = _options.allowedAutoPlacements,\n allowedAutoPlacements = _options$allowedAutoP === void 0 ? 
allPlacements : _options$allowedAutoP;\n var variation = getVariation(placement);\n var placements = variation ? flipVariations ? variationPlacements : variationPlacements.filter(function (placement) {\n return getVariation(placement) === variation;\n }) : basePlacements;\n var allowedPlacements = placements.filter(function (placement) {\n return allowedAutoPlacements.indexOf(placement) >= 0;\n });\n\n if (allowedPlacements.length === 0) {\n allowedPlacements = placements;\n\n if (process.env.NODE_ENV !== \"production\") {\n console.error(['Popper: The `allowedAutoPlacements` option did not allow any', 'placements. Ensure the `placement` option matches the variation', 'of the allowed placements.', 'For example, \"auto\" cannot be used to allow \"bottom-start\".', 'Use \"auto-start\" instead.'].join(' '));\n }\n } // $FlowFixMe[incompatible-type]: Flow seems to have problems with two array unions...\n\n\n var overflows = allowedPlacements.reduce(function (acc, placement) {\n acc[placement] = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding\n })[getBasePlacement(placement)];\n return acc;\n }, {});\n return Object.keys(overflows).sort(function (a, b) {\n return overflows[a] - overflows[b];\n });\n}","import { top, bottom, left, right } from \"../enums.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\n\nfunction getSideOffsets(overflow, rect, preventedOffsets) {\n if (preventedOffsets === void 0) {\n preventedOffsets = {\n x: 0,\n y: 0\n };\n }\n\n return {\n top: overflow.top - rect.height - preventedOffsets.y,\n right: overflow.right - rect.width + preventedOffsets.x,\n bottom: overflow.bottom - rect.height + preventedOffsets.y,\n left: overflow.left - rect.width - preventedOffsets.x\n };\n}\n\nfunction isAnySideFullyClipped(overflow) {\n return [top, right, bottom, left].some(function (side) {\n return overflow[side] >= 0;\n });\n}\n\nfunction hide(_ref) {\n var state = _ref.state,\n name = _ref.name;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var preventedOffsets = state.modifiersData.preventOverflow;\n var referenceOverflow = detectOverflow(state, {\n elementContext: 'reference'\n });\n var popperAltOverflow = detectOverflow(state, {\n altBoundary: true\n });\n var referenceClippingOffsets = getSideOffsets(referenceOverflow, referenceRect);\n var popperEscapeOffsets = getSideOffsets(popperAltOverflow, popperRect, preventedOffsets);\n var isReferenceHidden = isAnySideFullyClipped(referenceClippingOffsets);\n var hasPopperEscaped = isAnySideFullyClipped(popperEscapeOffsets);\n state.modifiersData[name] = {\n referenceClippingOffsets: referenceClippingOffsets,\n popperEscapeOffsets: popperEscapeOffsets,\n isReferenceHidden: isReferenceHidden,\n hasPopperEscaped: hasPopperEscaped\n };\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-reference-hidden': isReferenceHidden,\n 'data-popper-escaped': hasPopperEscaped\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'hide',\n enabled: true,\n phase: 'main',\n requiresIfExists: ['preventOverflow'],\n fn: hide\n};","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport { top, left, right, placements } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport function distanceAndSkiddingToXY(placement, rects, offset) {\n var basePlacement = getBasePlacement(placement);\n var invertDistance = [left, 
top].indexOf(basePlacement) >= 0 ? -1 : 1;\n\n var _ref = typeof offset === 'function' ? offset(Object.assign({}, rects, {\n placement: placement\n })) : offset,\n skidding = _ref[0],\n distance = _ref[1];\n\n skidding = skidding || 0;\n distance = (distance || 0) * invertDistance;\n return [left, right].indexOf(basePlacement) >= 0 ? {\n x: distance,\n y: skidding\n } : {\n x: skidding,\n y: distance\n };\n}\n\nfunction offset(_ref2) {\n var state = _ref2.state,\n options = _ref2.options,\n name = _ref2.name;\n var _options$offset = options.offset,\n offset = _options$offset === void 0 ? [0, 0] : _options$offset;\n var data = placements.reduce(function (acc, placement) {\n acc[placement] = distanceAndSkiddingToXY(placement, state.rects, offset);\n return acc;\n }, {});\n var _data$state$placement = data[state.placement],\n x = _data$state$placement.x,\n y = _data$state$placement.y;\n\n if (state.modifiersData.popperOffsets != null) {\n state.modifiersData.popperOffsets.x += x;\n state.modifiersData.popperOffsets.y += y;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'offset',\n enabled: true,\n phase: 'main',\n requires: ['popperOffsets'],\n fn: offset\n};","import computeOffsets from \"../utils/computeOffsets.js\";\n\nfunction popperOffsets(_ref) {\n var state = _ref.state,\n name = _ref.name;\n // Offsets are the actual position the popper needs to have to be\n // properly positioned near its reference element\n // This is the most basic placement, and will be adjusted by\n // the modifiers in the next step\n state.modifiersData[name] = computeOffsets({\n reference: state.rects.reference,\n element: state.rects.popper,\n strategy: 'absolute',\n placement: state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'popperOffsets',\n enabled: true,\n phase: 'read',\n fn: popperOffsets,\n data: {}\n};","import { top, left, right, bottom, start } from \"../enums.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport getAltAxis from \"../utils/getAltAxis.js\";\nimport { within, withinMaxClamp } from \"../utils/within.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport getFreshSideObject from \"../utils/getFreshSideObject.js\";\nimport { min as mathMin, max as mathMax } from \"../utils/math.js\";\n\nfunction preventOverflow(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? false : _options$altAxis,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n padding = options.padding,\n _options$tether = options.tether,\n tether = _options$tether === void 0 ? true : _options$tether,\n _options$tetherOffset = options.tetherOffset,\n tetherOffset = _options$tetherOffset === void 0 ? 
0 : _options$tetherOffset;\n var overflow = detectOverflow(state, {\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n altBoundary: altBoundary\n });\n var basePlacement = getBasePlacement(state.placement);\n var variation = getVariation(state.placement);\n var isBasePlacement = !variation;\n var mainAxis = getMainAxisFromPlacement(basePlacement);\n var altAxis = getAltAxis(mainAxis);\n var popperOffsets = state.modifiersData.popperOffsets;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var tetherOffsetValue = typeof tetherOffset === 'function' ? tetherOffset(Object.assign({}, state.rects, {\n placement: state.placement\n })) : tetherOffset;\n var normalizedTetherOffsetValue = typeof tetherOffsetValue === 'number' ? {\n mainAxis: tetherOffsetValue,\n altAxis: tetherOffsetValue\n } : Object.assign({\n mainAxis: 0,\n altAxis: 0\n }, tetherOffsetValue);\n var offsetModifierState = state.modifiersData.offset ? state.modifiersData.offset[state.placement] : null;\n var data = {\n x: 0,\n y: 0\n };\n\n if (!popperOffsets) {\n return;\n }\n\n if (checkMainAxis) {\n var _offsetModifierState$;\n\n var mainSide = mainAxis === 'y' ? top : left;\n var altSide = mainAxis === 'y' ? bottom : right;\n var len = mainAxis === 'y' ? 'height' : 'width';\n var offset = popperOffsets[mainAxis];\n var min = offset + overflow[mainSide];\n var max = offset - overflow[altSide];\n var additive = tether ? -popperRect[len] / 2 : 0;\n var minLen = variation === start ? referenceRect[len] : popperRect[len];\n var maxLen = variation === start ? -popperRect[len] : -referenceRect[len]; // We need to include the arrow in the calculation so the arrow doesn't go\n // outside the reference bounds\n\n var arrowElement = state.elements.arrow;\n var arrowRect = tether && arrowElement ? getLayoutRect(arrowElement) : {\n width: 0,\n height: 0\n };\n var arrowPaddingObject = state.modifiersData['arrow#persistent'] ? state.modifiersData['arrow#persistent'].padding : getFreshSideObject();\n var arrowPaddingMin = arrowPaddingObject[mainSide];\n var arrowPaddingMax = arrowPaddingObject[altSide]; // If the reference length is smaller than the arrow length, we don't want\n // to include its full size in the calculation. If the reference is small\n // and near the edge of a boundary, the popper can overflow even if the\n // reference is not overflowing as well (e.g. virtual elements with no\n // width or height)\n\n var arrowLen = within(0, referenceRect[len], arrowRect[len]);\n var minOffset = isBasePlacement ? referenceRect[len] / 2 - additive - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis : minLen - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis;\n var maxOffset = isBasePlacement ? -referenceRect[len] / 2 + additive + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis : maxLen + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis;\n var arrowOffsetParent = state.elements.arrow && getOffsetParent(state.elements.arrow);\n var clientOffset = arrowOffsetParent ? mainAxis === 'y' ? arrowOffsetParent.clientTop || 0 : arrowOffsetParent.clientLeft || 0 : 0;\n var offsetModifierValue = (_offsetModifierState$ = offsetModifierState == null ? void 0 : offsetModifierState[mainAxis]) != null ? _offsetModifierState$ : 0;\n var tetherMin = offset + minOffset - offsetModifierValue - clientOffset;\n var tetherMax = offset + maxOffset - offsetModifierValue;\n var preventedOffset = within(tether ? 
mathMin(min, tetherMin) : min, offset, tether ? mathMax(max, tetherMax) : max);\n popperOffsets[mainAxis] = preventedOffset;\n data[mainAxis] = preventedOffset - offset;\n }\n\n if (checkAltAxis) {\n var _offsetModifierState$2;\n\n var _mainSide = mainAxis === 'x' ? top : left;\n\n var _altSide = mainAxis === 'x' ? bottom : right;\n\n var _offset = popperOffsets[altAxis];\n\n var _len = altAxis === 'y' ? 'height' : 'width';\n\n var _min = _offset + overflow[_mainSide];\n\n var _max = _offset - overflow[_altSide];\n\n var isOriginSide = [top, left].indexOf(basePlacement) !== -1;\n\n var _offsetModifierValue = (_offsetModifierState$2 = offsetModifierState == null ? void 0 : offsetModifierState[altAxis]) != null ? _offsetModifierState$2 : 0;\n\n var _tetherMin = isOriginSide ? _min : _offset - referenceRect[_len] - popperRect[_len] - _offsetModifierValue + normalizedTetherOffsetValue.altAxis;\n\n var _tetherMax = isOriginSide ? _offset + referenceRect[_len] + popperRect[_len] - _offsetModifierValue - normalizedTetherOffsetValue.altAxis : _max;\n\n var _preventedOffset = tether && isOriginSide ? withinMaxClamp(_tetherMin, _offset, _tetherMax) : within(tether ? _tetherMin : _min, _offset, tether ? _tetherMax : _max);\n\n popperOffsets[altAxis] = _preventedOffset;\n data[altAxis] = _preventedOffset - _offset;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'preventOverflow',\n enabled: true,\n phase: 'main',\n fn: preventOverflow,\n requiresIfExists: ['offset']\n};","export default function getAltAxis(axis) {\n return axis === 'x' ? 'y' : 'x';\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getNodeScroll from \"./getNodeScroll.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport { round } from \"../utils/math.js\";\n\nfunction isElementScaled(element) {\n var rect = element.getBoundingClientRect();\n var scaleX = round(rect.width) / element.offsetWidth || 1;\n var scaleY = round(rect.height) / element.offsetHeight || 1;\n return scaleX !== 1 || scaleY !== 1;\n} // Returns the composite rect of an element relative to its offsetParent.\n// Composite means it takes into account transforms as well as layout.\n\n\nexport default function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {\n if (isFixed === void 0) {\n isFixed = false;\n }\n\n var isOffsetParentAnElement = isHTMLElement(offsetParent);\n var offsetParentIsScaled = isHTMLElement(offsetParent) && isElementScaled(offsetParent);\n var documentElement = getDocumentElement(offsetParent);\n var rect = getBoundingClientRect(elementOrVirtualElement, offsetParentIsScaled, isFixed);\n var scroll = {\n scrollLeft: 0,\n scrollTop: 0\n };\n var offsets = {\n x: 0,\n y: 0\n };\n\n if (isOffsetParentAnElement || !isOffsetParentAnElement && !isFixed) {\n if (getNodeName(offsetParent) !== 'body' || // https://github.com/popperjs/popper-core/issues/1078\n isScrollParent(documentElement)) {\n scroll = getNodeScroll(offsetParent);\n }\n\n if (isHTMLElement(offsetParent)) {\n offsets = getBoundingClientRect(offsetParent, true);\n offsets.x += offsetParent.clientLeft;\n offsets.y += offsetParent.clientTop;\n } else if (documentElement) {\n offsets.x = getWindowScrollBarX(documentElement);\n }\n }\n\n 
return {\n x: rect.left + scroll.scrollLeft - offsets.x,\n y: rect.top + scroll.scrollTop - offsets.y,\n width: rect.width,\n height: rect.height\n };\n}","import getWindowScroll from \"./getWindowScroll.js\";\nimport getWindow from \"./getWindow.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getHTMLElementScroll from \"./getHTMLElementScroll.js\";\nexport default function getNodeScroll(node) {\n if (node === getWindow(node) || !isHTMLElement(node)) {\n return getWindowScroll(node);\n } else {\n return getHTMLElementScroll(node);\n }\n}","export default function getHTMLElementScroll(element) {\n return {\n scrollLeft: element.scrollLeft,\n scrollTop: element.scrollTop\n };\n}","import { modifierPhases } from \"../enums.js\"; // source: https://stackoverflow.com/questions/49875255\n\nfunction order(modifiers) {\n var map = new Map();\n var visited = new Set();\n var result = [];\n modifiers.forEach(function (modifier) {\n map.set(modifier.name, modifier);\n }); // On visiting object, check for its dependencies and visit them recursively\n\n function sort(modifier) {\n visited.add(modifier.name);\n var requires = [].concat(modifier.requires || [], modifier.requiresIfExists || []);\n requires.forEach(function (dep) {\n if (!visited.has(dep)) {\n var depModifier = map.get(dep);\n\n if (depModifier) {\n sort(depModifier);\n }\n }\n });\n result.push(modifier);\n }\n\n modifiers.forEach(function (modifier) {\n if (!visited.has(modifier.name)) {\n // check for visited object\n sort(modifier);\n }\n });\n return result;\n}\n\nexport default function orderModifiers(modifiers) {\n // order based on dependencies\n var orderedModifiers = order(modifiers); // order based on phase\n\n return modifierPhases.reduce(function (acc, phase) {\n return acc.concat(orderedModifiers.filter(function (modifier) {\n return modifier.phase === phase;\n }));\n }, []);\n}","import getCompositeRect from \"./dom-utils/getCompositeRect.js\";\nimport getLayoutRect from \"./dom-utils/getLayoutRect.js\";\nimport listScrollParents from \"./dom-utils/listScrollParents.js\";\nimport getOffsetParent from \"./dom-utils/getOffsetParent.js\";\nimport getComputedStyle from \"./dom-utils/getComputedStyle.js\";\nimport orderModifiers from \"./utils/orderModifiers.js\";\nimport debounce from \"./utils/debounce.js\";\nimport validateModifiers from \"./utils/validateModifiers.js\";\nimport uniqueBy from \"./utils/uniqueBy.js\";\nimport getBasePlacement from \"./utils/getBasePlacement.js\";\nimport mergeByName from \"./utils/mergeByName.js\";\nimport detectOverflow from \"./utils/detectOverflow.js\";\nimport { isElement } from \"./dom-utils/instanceOf.js\";\nimport { auto } from \"./enums.js\";\nvar INVALID_ELEMENT_ERROR = 'Popper: Invalid reference or popper argument provided. They must be either a DOM element or virtual element.';\nvar INFINITE_LOOP_ERROR = 'Popper: An infinite loop in the modifiers cycle has been detected! 
The cycle has been interrupted to prevent a browser crash.';\nvar DEFAULT_OPTIONS = {\n placement: 'bottom',\n modifiers: [],\n strategy: 'absolute'\n};\n\nfunction areValidElements() {\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n\n return !args.some(function (element) {\n return !(element && typeof element.getBoundingClientRect === 'function');\n });\n}\n\nexport function popperGenerator(generatorOptions) {\n if (generatorOptions === void 0) {\n generatorOptions = {};\n }\n\n var _generatorOptions = generatorOptions,\n _generatorOptions$def = _generatorOptions.defaultModifiers,\n defaultModifiers = _generatorOptions$def === void 0 ? [] : _generatorOptions$def,\n _generatorOptions$def2 = _generatorOptions.defaultOptions,\n defaultOptions = _generatorOptions$def2 === void 0 ? DEFAULT_OPTIONS : _generatorOptions$def2;\n return function createPopper(reference, popper, options) {\n if (options === void 0) {\n options = defaultOptions;\n }\n\n var state = {\n placement: 'bottom',\n orderedModifiers: [],\n options: Object.assign({}, DEFAULT_OPTIONS, defaultOptions),\n modifiersData: {},\n elements: {\n reference: reference,\n popper: popper\n },\n attributes: {},\n styles: {}\n };\n var effectCleanupFns = [];\n var isDestroyed = false;\n var instance = {\n state: state,\n setOptions: function setOptions(setOptionsAction) {\n var options = typeof setOptionsAction === 'function' ? setOptionsAction(state.options) : setOptionsAction;\n cleanupModifierEffects();\n state.options = Object.assign({}, defaultOptions, state.options, options);\n state.scrollParents = {\n reference: isElement(reference) ? listScrollParents(reference) : reference.contextElement ? listScrollParents(reference.contextElement) : [],\n popper: listScrollParents(popper)\n }; // Orders the modifiers based on their dependencies and `phase`\n // properties\n\n var orderedModifiers = orderModifiers(mergeByName([].concat(defaultModifiers, state.options.modifiers))); // Strip out disabled modifiers\n\n state.orderedModifiers = orderedModifiers.filter(function (m) {\n return m.enabled;\n }); // Validate the provided modifiers so that the consumer will get warned\n // if one of the modifiers is invalid for any reason\n\n if (process.env.NODE_ENV !== \"production\") {\n var modifiers = uniqueBy([].concat(orderedModifiers, state.options.modifiers), function (_ref) {\n var name = _ref.name;\n return name;\n });\n validateModifiers(modifiers);\n\n if (getBasePlacement(state.options.placement) === auto) {\n var flipModifier = state.orderedModifiers.find(function (_ref2) {\n var name = _ref2.name;\n return name === 'flip';\n });\n\n if (!flipModifier) {\n console.error(['Popper: \"auto\" placements require the \"flip\" modifier be', 'present and enabled to work.'].join(' '));\n }\n }\n\n var _getComputedStyle = getComputedStyle(popper),\n marginTop = _getComputedStyle.marginTop,\n marginRight = _getComputedStyle.marginRight,\n marginBottom = _getComputedStyle.marginBottom,\n marginLeft = _getComputedStyle.marginLeft; // We no longer take into account `margins` on the popper, and it can\n // cause bugs with positioning, so we'll warn the consumer\n\n\n if ([marginTop, marginRight, marginBottom, marginLeft].some(function (margin) {\n return parseFloat(margin);\n })) {\n console.warn(['Popper: CSS \"margin\" styles cannot be used to apply padding', 'between the popper and its reference element or boundary.', 'To replicate margin, use the `offset` modifier, as 
well as', 'the `padding` option in the `preventOverflow` and `flip`', 'modifiers.'].join(' '));\n }\n }\n\n runModifierEffects();\n return instance.update();\n },\n // Sync update – it will always be executed, even if not necessary. This\n // is useful for low frequency updates where sync behavior simplifies the\n // logic.\n // For high frequency updates (e.g. `resize` and `scroll` events), always\n // prefer the async Popper#update method\n forceUpdate: function forceUpdate() {\n if (isDestroyed) {\n return;\n }\n\n var _state$elements = state.elements,\n reference = _state$elements.reference,\n popper = _state$elements.popper; // Don't proceed if `reference` or `popper` are not valid elements\n // anymore\n\n if (!areValidElements(reference, popper)) {\n if (process.env.NODE_ENV !== \"production\") {\n console.error(INVALID_ELEMENT_ERROR);\n }\n\n return;\n } // Store the reference and popper rects to be read by modifiers\n\n\n state.rects = {\n reference: getCompositeRect(reference, getOffsetParent(popper), state.options.strategy === 'fixed'),\n popper: getLayoutRect(popper)\n }; // Modifiers have the ability to reset the current update cycle. The\n // most common use case for this is the `flip` modifier changing the\n // placement, which then needs to re-run all the modifiers, because the\n // logic was previously ran for the previous placement and is therefore\n // stale/incorrect\n\n state.reset = false;\n state.placement = state.options.placement; // On each update cycle, the `modifiersData` property for each modifier\n // is filled with the initial data specified by the modifier. This means\n // it doesn't persist and is fresh on each update.\n // To ensure persistent data, use `${name}#persistent`\n\n state.orderedModifiers.forEach(function (modifier) {\n return state.modifiersData[modifier.name] = Object.assign({}, modifier.data);\n });\n var __debug_loops__ = 0;\n\n for (var index = 0; index < state.orderedModifiers.length; index++) {\n if (process.env.NODE_ENV !== \"production\") {\n __debug_loops__ += 1;\n\n if (__debug_loops__ > 100) {\n console.error(INFINITE_LOOP_ERROR);\n break;\n }\n }\n\n if (state.reset === true) {\n state.reset = false;\n index = -1;\n continue;\n }\n\n var _state$orderedModifie = state.orderedModifiers[index],\n fn = _state$orderedModifie.fn,\n _state$orderedModifie2 = _state$orderedModifie.options,\n _options = _state$orderedModifie2 === void 0 ? {} : _state$orderedModifie2,\n name = _state$orderedModifie.name;\n\n if (typeof fn === 'function') {\n state = fn({\n state: state,\n options: _options,\n name: name,\n instance: instance\n }) || state;\n }\n }\n },\n // Async and optimistically optimized update – it will not be executed if\n // not necessary (debounced to run at most once-per-tick)\n update: debounce(function () {\n return new Promise(function (resolve) {\n instance.forceUpdate();\n resolve(state);\n });\n }),\n destroy: function destroy() {\n cleanupModifierEffects();\n isDestroyed = true;\n }\n };\n\n if (!areValidElements(reference, popper)) {\n if (process.env.NODE_ENV !== \"production\") {\n console.error(INVALID_ELEMENT_ERROR);\n }\n\n return instance;\n }\n\n instance.setOptions(options).then(function (state) {\n if (!isDestroyed && options.onFirstUpdate) {\n options.onFirstUpdate(state);\n }\n }); // Modifiers have the ability to execute arbitrary code before the first\n // update cycle runs. They will be executed in the same order as the update\n // cycle. 
This is useful when a modifier adds some persistent data that\n // other modifiers need to use, but the modifier is run after the dependent\n // one.\n\n function runModifierEffects() {\n state.orderedModifiers.forEach(function (_ref3) {\n var name = _ref3.name,\n _ref3$options = _ref3.options,\n options = _ref3$options === void 0 ? {} : _ref3$options,\n effect = _ref3.effect;\n\n if (typeof effect === 'function') {\n var cleanupFn = effect({\n state: state,\n name: name,\n instance: instance,\n options: options\n });\n\n var noopFn = function noopFn() {};\n\n effectCleanupFns.push(cleanupFn || noopFn);\n }\n });\n }\n\n function cleanupModifierEffects() {\n effectCleanupFns.forEach(function (fn) {\n return fn();\n });\n effectCleanupFns = [];\n }\n\n return instance;\n };\n}\nexport var createPopper = /*#__PURE__*/popperGenerator(); // eslint-disable-next-line import/no-unused-modules\n\nexport { detectOverflow };","export default function debounce(fn) {\n var pending;\n return function () {\n if (!pending) {\n pending = new Promise(function (resolve) {\n Promise.resolve().then(function () {\n pending = undefined;\n resolve(fn());\n });\n });\n }\n\n return pending;\n };\n}","export default function mergeByName(modifiers) {\n var merged = modifiers.reduce(function (merged, current) {\n var existing = merged[current.name];\n merged[current.name] = existing ? Object.assign({}, existing, current, {\n options: Object.assign({}, existing.options, current.options),\n data: Object.assign({}, existing.data, current.data)\n }) : current;\n return merged;\n }, {}); // IE11 does not support Object.values\n\n return Object.keys(merged).map(function (key) {\n return merged[key];\n });\n}","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nimport offset from \"./modifiers/offset.js\";\nimport flip from \"./modifiers/flip.js\";\nimport preventOverflow from \"./modifiers/preventOverflow.js\";\nimport arrow from \"./modifiers/arrow.js\";\nimport hide from \"./modifiers/hide.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles, offset, flip, preventOverflow, arrow, hide];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow }; // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper as createPopperLite } from \"./popper-lite.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport * from \"./modifiers/index.js\";","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow };","/*!\n * Bootstrap v5.2.3 (https://getbootstrap.com/)\n * Copyright 2011-2022 The 
Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n */\nimport * as Popper from '@popperjs/core';\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/index.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\nconst MAX_UID = 1000000;\nconst MILLISECONDS_MULTIPLIER = 1000;\nconst TRANSITION_END = 'transitionend'; // Shout-out Angus Croll (https://goo.gl/pxwQGp)\n\nconst toType = object => {\n if (object === null || object === undefined) {\n return `${object}`;\n }\n\n return Object.prototype.toString.call(object).match(/\\s([a-z]+)/i)[1].toLowerCase();\n};\n/**\n * Public Util API\n */\n\n\nconst getUID = prefix => {\n do {\n prefix += Math.floor(Math.random() * MAX_UID);\n } while (document.getElementById(prefix));\n\n return prefix;\n};\n\nconst getSelector = element => {\n let selector = element.getAttribute('data-bs-target');\n\n if (!selector || selector === '#') {\n let hrefAttribute = element.getAttribute('href'); // The only valid content that could double as a selector are IDs or classes,\n // so everything starting with `#` or `.`. If a \"real\" URL is used as the selector,\n // `document.querySelector` will rightfully complain it is invalid.\n // See https://github.com/twbs/bootstrap/issues/32273\n\n if (!hrefAttribute || !hrefAttribute.includes('#') && !hrefAttribute.startsWith('.')) {\n return null;\n } // Just in case some CMS puts out a full URL with the anchor appended\n\n\n if (hrefAttribute.includes('#') && !hrefAttribute.startsWith('#')) {\n hrefAttribute = `#${hrefAttribute.split('#')[1]}`;\n }\n\n selector = hrefAttribute && hrefAttribute !== '#' ? hrefAttribute.trim() : null;\n }\n\n return selector;\n};\n\nconst getSelectorFromElement = element => {\n const selector = getSelector(element);\n\n if (selector) {\n return document.querySelector(selector) ? selector : null;\n }\n\n return null;\n};\n\nconst getElementFromSelector = element => {\n const selector = getSelector(element);\n return selector ? document.querySelector(selector) : null;\n};\n\nconst getTransitionDurationFromElement = element => {\n if (!element) {\n return 0;\n } // Get transition-duration of the element\n\n\n let {\n transitionDuration,\n transitionDelay\n } = window.getComputedStyle(element);\n const floatTransitionDuration = Number.parseFloat(transitionDuration);\n const floatTransitionDelay = Number.parseFloat(transitionDelay); // Return 0 if element or transition duration is not found\n\n if (!floatTransitionDuration && !floatTransitionDelay) {\n return 0;\n } // If multiple durations are defined, take the first\n\n\n transitionDuration = transitionDuration.split(',')[0];\n transitionDelay = transitionDelay.split(',')[0];\n return (Number.parseFloat(transitionDuration) + Number.parseFloat(transitionDelay)) * MILLISECONDS_MULTIPLIER;\n};\n\nconst triggerTransitionEnd = element => {\n element.dispatchEvent(new Event(TRANSITION_END));\n};\n\nconst isElement = object => {\n if (!object || typeof object !== 'object') {\n return false;\n }\n\n if (typeof object.jquery !== 'undefined') {\n object = object[0];\n }\n\n return typeof object.nodeType !== 'undefined';\n};\n\nconst getElement = object => {\n // it's a jQuery object or a node element\n if (isElement(object)) {\n return object.jquery ? 
object[0] : object;\n }\n\n if (typeof object === 'string' && object.length > 0) {\n return document.querySelector(object);\n }\n\n return null;\n};\n\nconst isVisible = element => {\n if (!isElement(element) || element.getClientRects().length === 0) {\n return false;\n }\n\n const elementIsVisible = getComputedStyle(element).getPropertyValue('visibility') === 'visible'; // Handle `details` element as its content may falsie appear visible when it is closed\n\n const closedDetails = element.closest('details:not([open])');\n\n if (!closedDetails) {\n return elementIsVisible;\n }\n\n if (closedDetails !== element) {\n const summary = element.closest('summary');\n\n if (summary && summary.parentNode !== closedDetails) {\n return false;\n }\n\n if (summary === null) {\n return false;\n }\n }\n\n return elementIsVisible;\n};\n\nconst isDisabled = element => {\n if (!element || element.nodeType !== Node.ELEMENT_NODE) {\n return true;\n }\n\n if (element.classList.contains('disabled')) {\n return true;\n }\n\n if (typeof element.disabled !== 'undefined') {\n return element.disabled;\n }\n\n return element.hasAttribute('disabled') && element.getAttribute('disabled') !== 'false';\n};\n\nconst findShadowRoot = element => {\n if (!document.documentElement.attachShadow) {\n return null;\n } // Can find the shadow root otherwise it'll return the document\n\n\n if (typeof element.getRootNode === 'function') {\n const root = element.getRootNode();\n return root instanceof ShadowRoot ? root : null;\n }\n\n if (element instanceof ShadowRoot) {\n return element;\n } // when we don't find a shadow root\n\n\n if (!element.parentNode) {\n return null;\n }\n\n return findShadowRoot(element.parentNode);\n};\n\nconst noop = () => {};\n/**\n * Trick to restart an element's animation\n *\n * @param {HTMLElement} element\n * @return void\n *\n * @see https://www.charistheo.io/blog/2021/02/restart-a-css-animation-with-javascript/#restarting-a-css-animation\n */\n\n\nconst reflow = element => {\n element.offsetHeight; // eslint-disable-line no-unused-expressions\n};\n\nconst getjQuery = () => {\n if (window.jQuery && !document.body.hasAttribute('data-bs-no-jquery')) {\n return window.jQuery;\n }\n\n return null;\n};\n\nconst DOMContentLoadedCallbacks = [];\n\nconst onDOMContentLoaded = callback => {\n if (document.readyState === 'loading') {\n // add listener on the first call when the document is in loading state\n if (!DOMContentLoadedCallbacks.length) {\n document.addEventListener('DOMContentLoaded', () => {\n for (const callback of DOMContentLoadedCallbacks) {\n callback();\n }\n });\n }\n\n DOMContentLoadedCallbacks.push(callback);\n } else {\n callback();\n }\n};\n\nconst isRTL = () => document.documentElement.dir === 'rtl';\n\nconst defineJQueryPlugin = plugin => {\n onDOMContentLoaded(() => {\n const $ = getjQuery();\n /* istanbul ignore if */\n\n if ($) {\n const name = plugin.NAME;\n const JQUERY_NO_CONFLICT = $.fn[name];\n $.fn[name] = plugin.jQueryInterface;\n $.fn[name].Constructor = plugin;\n\n $.fn[name].noConflict = () => {\n $.fn[name] = JQUERY_NO_CONFLICT;\n return plugin.jQueryInterface;\n };\n }\n });\n};\n\nconst execute = callback => {\n if (typeof callback === 'function') {\n callback();\n }\n};\n\nconst executeAfterTransition = (callback, transitionElement, waitForTransition = true) => {\n if (!waitForTransition) {\n execute(callback);\n return;\n }\n\n const durationPadding = 5;\n const emulatedDuration = getTransitionDurationFromElement(transitionElement) + durationPadding;\n let called = 
false;\n\n const handler = ({\n target\n }) => {\n if (target !== transitionElement) {\n return;\n }\n\n called = true;\n transitionElement.removeEventListener(TRANSITION_END, handler);\n execute(callback);\n };\n\n transitionElement.addEventListener(TRANSITION_END, handler);\n setTimeout(() => {\n if (!called) {\n triggerTransitionEnd(transitionElement);\n }\n }, emulatedDuration);\n};\n/**\n * Return the previous/next element of a list.\n *\n * @param {array} list The list of elements\n * @param activeElement The active element\n * @param shouldGetNext Choose to get next or previous element\n * @param isCycleAllowed\n * @return {Element|elem} The proper element\n */\n\n\nconst getNextActiveElement = (list, activeElement, shouldGetNext, isCycleAllowed) => {\n const listLength = list.length;\n let index = list.indexOf(activeElement); // if the element does not exist in the list return an element\n // depending on the direction and if cycle is allowed\n\n if (index === -1) {\n return !shouldGetNext && isCycleAllowed ? list[listLength - 1] : list[0];\n }\n\n index += shouldGetNext ? 1 : -1;\n\n if (isCycleAllowed) {\n index = (index + listLength) % listLength;\n }\n\n return list[Math.max(0, Math.min(index, listLength - 1))];\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): dom/event-handler.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst namespaceRegex = /[^.]*(?=\\..*)\\.|.*/;\nconst stripNameRegex = /\\..*/;\nconst stripUidRegex = /::\\d+$/;\nconst eventRegistry = {}; // Events storage\n\nlet uidEvent = 1;\nconst customEvents = {\n mouseenter: 'mouseover',\n mouseleave: 'mouseout'\n};\nconst nativeEvents = new Set(['click', 'dblclick', 'mouseup', 'mousedown', 'contextmenu', 'mousewheel', 'DOMMouseScroll', 'mouseover', 'mouseout', 'mousemove', 'selectstart', 'selectend', 'keydown', 'keypress', 'keyup', 'orientationchange', 'touchstart', 'touchmove', 'touchend', 'touchcancel', 'pointerdown', 'pointermove', 'pointerup', 'pointerleave', 'pointercancel', 'gesturestart', 'gesturechange', 'gestureend', 'focus', 'blur', 'change', 'reset', 'select', 'submit', 'focusin', 'focusout', 'load', 'unload', 'beforeunload', 'resize', 'move', 'DOMContentLoaded', 'readystatechange', 'error', 'abort', 'scroll']);\n/**\n * Private methods\n */\n\nfunction makeEventUid(element, uid) {\n return uid && `${uid}::${uidEvent++}` || element.uidEvent || uidEvent++;\n}\n\nfunction getElementEvents(element) {\n const uid = makeEventUid(element);\n element.uidEvent = uid;\n eventRegistry[uid] = eventRegistry[uid] || {};\n return eventRegistry[uid];\n}\n\nfunction bootstrapHandler(element, fn) {\n return function handler(event) {\n hydrateObj(event, {\n delegateTarget: element\n });\n\n if (handler.oneOff) {\n EventHandler.off(element, event.type, fn);\n }\n\n return fn.apply(element, [event]);\n };\n}\n\nfunction bootstrapDelegationHandler(element, selector, fn) {\n return function handler(event) {\n const domElements = element.querySelectorAll(selector);\n\n for (let {\n target\n } = event; target && target !== this; target = target.parentNode) {\n for (const domElement of domElements) {\n if (domElement !== target) {\n continue;\n }\n\n hydrateObj(event, {\n delegateTarget: target\n });\n\n if (handler.oneOff) {\n EventHandler.off(element, event.type, selector, fn);\n }\n\n return fn.apply(target, [event]);\n 
}\n }\n };\n}\n\nfunction findHandler(events, callable, delegationSelector = null) {\n return Object.values(events).find(event => event.callable === callable && event.delegationSelector === delegationSelector);\n}\n\nfunction normalizeParameters(originalTypeEvent, handler, delegationFunction) {\n const isDelegated = typeof handler === 'string'; // todo: tooltip passes `false` instead of selector, so we need to check\n\n const callable = isDelegated ? delegationFunction : handler || delegationFunction;\n let typeEvent = getTypeEvent(originalTypeEvent);\n\n if (!nativeEvents.has(typeEvent)) {\n typeEvent = originalTypeEvent;\n }\n\n return [isDelegated, callable, typeEvent];\n}\n\nfunction addHandler(element, originalTypeEvent, handler, delegationFunction, oneOff) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n\n let [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction); // in case of mouseenter or mouseleave wrap the handler within a function that checks for its DOM position\n // this prevents the handler from being dispatched the same way as mouseover or mouseout does\n\n if (originalTypeEvent in customEvents) {\n const wrapFunction = fn => {\n return function (event) {\n if (!event.relatedTarget || event.relatedTarget !== event.delegateTarget && !event.delegateTarget.contains(event.relatedTarget)) {\n return fn.call(this, event);\n }\n };\n };\n\n callable = wrapFunction(callable);\n }\n\n const events = getElementEvents(element);\n const handlers = events[typeEvent] || (events[typeEvent] = {});\n const previousFunction = findHandler(handlers, callable, isDelegated ? handler : null);\n\n if (previousFunction) {\n previousFunction.oneOff = previousFunction.oneOff && oneOff;\n return;\n }\n\n const uid = makeEventUid(callable, originalTypeEvent.replace(namespaceRegex, ''));\n const fn = isDelegated ? bootstrapDelegationHandler(element, handler, callable) : bootstrapHandler(element, callable);\n fn.delegationSelector = isDelegated ? 
handler : null;\n fn.callable = callable;\n fn.oneOff = oneOff;\n fn.uidEvent = uid;\n handlers[uid] = fn;\n element.addEventListener(typeEvent, fn, isDelegated);\n}\n\nfunction removeHandler(element, events, typeEvent, handler, delegationSelector) {\n const fn = findHandler(events[typeEvent], handler, delegationSelector);\n\n if (!fn) {\n return;\n }\n\n element.removeEventListener(typeEvent, fn, Boolean(delegationSelector));\n delete events[typeEvent][fn.uidEvent];\n}\n\nfunction removeNamespacedHandlers(element, events, typeEvent, namespace) {\n const storeElementEvent = events[typeEvent] || {};\n\n for (const handlerKey of Object.keys(storeElementEvent)) {\n if (handlerKey.includes(namespace)) {\n const event = storeElementEvent[handlerKey];\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n}\n\nfunction getTypeEvent(event) {\n // allow to get the native events from namespaced events ('click.bs.button' --> 'click')\n event = event.replace(stripNameRegex, '');\n return customEvents[event] || event;\n}\n\nconst EventHandler = {\n on(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, false);\n },\n\n one(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, true);\n },\n\n off(element, originalTypeEvent, handler, delegationFunction) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n\n const [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction);\n const inNamespace = typeEvent !== originalTypeEvent;\n const events = getElementEvents(element);\n const storeElementEvent = events[typeEvent] || {};\n const isNamespace = originalTypeEvent.startsWith('.');\n\n if (typeof callable !== 'undefined') {\n // Simplest case: handler is passed, remove that listener ONLY.\n if (!Object.keys(storeElementEvent).length) {\n return;\n }\n\n removeHandler(element, events, typeEvent, callable, isDelegated ? 
handler : null);\n return;\n }\n\n if (isNamespace) {\n for (const elementEvent of Object.keys(events)) {\n removeNamespacedHandlers(element, events, elementEvent, originalTypeEvent.slice(1));\n }\n }\n\n for (const keyHandlers of Object.keys(storeElementEvent)) {\n const handlerKey = keyHandlers.replace(stripUidRegex, '');\n\n if (!inNamespace || originalTypeEvent.includes(handlerKey)) {\n const event = storeElementEvent[keyHandlers];\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n },\n\n trigger(element, event, args) {\n if (typeof event !== 'string' || !element) {\n return null;\n }\n\n const $ = getjQuery();\n const typeEvent = getTypeEvent(event);\n const inNamespace = event !== typeEvent;\n let jQueryEvent = null;\n let bubbles = true;\n let nativeDispatch = true;\n let defaultPrevented = false;\n\n if (inNamespace && $) {\n jQueryEvent = $.Event(event, args);\n $(element).trigger(jQueryEvent);\n bubbles = !jQueryEvent.isPropagationStopped();\n nativeDispatch = !jQueryEvent.isImmediatePropagationStopped();\n defaultPrevented = jQueryEvent.isDefaultPrevented();\n }\n\n let evt = new Event(event, {\n bubbles,\n cancelable: true\n });\n evt = hydrateObj(evt, args);\n\n if (defaultPrevented) {\n evt.preventDefault();\n }\n\n if (nativeDispatch) {\n element.dispatchEvent(evt);\n }\n\n if (evt.defaultPrevented && jQueryEvent) {\n jQueryEvent.preventDefault();\n }\n\n return evt;\n }\n\n};\n\nfunction hydrateObj(obj, meta) {\n for (const [key, value] of Object.entries(meta || {})) {\n try {\n obj[key] = value;\n } catch (_unused) {\n Object.defineProperty(obj, key, {\n configurable: true,\n\n get() {\n return value;\n }\n\n });\n }\n }\n\n return obj;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): dom/data.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n/**\n * Constants\n */\nconst elementMap = new Map();\nconst Data = {\n set(element, key, instance) {\n if (!elementMap.has(element)) {\n elementMap.set(element, new Map());\n }\n\n const instanceMap = elementMap.get(element); // make it clear we only want one instance per element\n // can be removed later when multiple key/instances are fine to be used\n\n if (!instanceMap.has(key) && instanceMap.size !== 0) {\n // eslint-disable-next-line no-console\n console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(instanceMap.keys())[0]}.`);\n return;\n }\n\n instanceMap.set(key, instance);\n },\n\n get(element, key) {\n if (elementMap.has(element)) {\n return elementMap.get(element).get(key) || null;\n }\n\n return null;\n },\n\n remove(element, key) {\n if (!elementMap.has(element)) {\n return;\n }\n\n const instanceMap = elementMap.get(element);\n instanceMap.delete(key); // free up element references if there are no instances left for an element\n\n if (instanceMap.size === 0) {\n elementMap.delete(element);\n }\n }\n\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): dom/manipulator.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\nfunction normalizeData(value) {\n if (value === 'true') {\n return true;\n }\n\n if (value === 'false') {\n return false;\n }\n\n if (value === Number(value).toString()) {\n return Number(value);\n }\n\n if (value === '' || value === 'null') {\n return null;\n }\n\n if (typeof value !== 'string') {\n return value;\n }\n\n try {\n return JSON.parse(decodeURIComponent(value));\n } catch (_unused) {\n return value;\n }\n}\n\nfunction normalizeDataKey(key) {\n return key.replace(/[A-Z]/g, chr => `-${chr.toLowerCase()}`);\n}\n\nconst Manipulator = {\n setDataAttribute(element, key, value) {\n element.setAttribute(`data-bs-${normalizeDataKey(key)}`, value);\n },\n\n removeDataAttribute(element, key) {\n element.removeAttribute(`data-bs-${normalizeDataKey(key)}`);\n },\n\n getDataAttributes(element) {\n if (!element) {\n return {};\n }\n\n const attributes = {};\n const bsKeys = Object.keys(element.dataset).filter(key => key.startsWith('bs') && !key.startsWith('bsConfig'));\n\n for (const key of bsKeys) {\n let pureKey = key.replace(/^bs/, '');\n pureKey = pureKey.charAt(0).toLowerCase() + pureKey.slice(1, pureKey.length);\n attributes[pureKey] = normalizeData(element.dataset[key]);\n }\n\n return attributes;\n },\n\n getDataAttribute(element, key) {\n return normalizeData(element.getAttribute(`data-bs-${normalizeDataKey(key)}`));\n }\n\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/config.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Class definition\n */\n\nclass Config {\n // Getters\n static get Default() {\n return {};\n }\n\n static get DefaultType() {\n return {};\n }\n\n static get NAME() {\n throw new Error('You have to implement the static method \"NAME\", for each component!');\n }\n\n _getConfig(config) {\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n\n this._typeCheckConfig(config);\n\n return config;\n }\n\n _configAfterMerge(config) {\n return config;\n }\n\n _mergeConfigObj(config, element) {\n const jsonConfig = isElement(element) ? Manipulator.getDataAttribute(element, 'config') : {}; // try to parse\n\n return { ...this.constructor.Default,\n ...(typeof jsonConfig === 'object' ? jsonConfig : {}),\n ...(isElement(element) ? Manipulator.getDataAttributes(element) : {}),\n ...(typeof config === 'object' ? 
config : {})\n };\n }\n\n _typeCheckConfig(config, configTypes = this.constructor.DefaultType) {\n for (const property of Object.keys(configTypes)) {\n const expectedTypes = configTypes[property];\n const value = config[property];\n const valueType = isElement(value) ? 'element' : toType(value);\n\n if (!new RegExp(expectedTypes).test(valueType)) {\n throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option \"${property}\" provided type \"${valueType}\" but expected type \"${expectedTypes}\".`);\n }\n }\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): base-component.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst VERSION = '5.2.3';\n/**\n * Class definition\n */\n\nclass BaseComponent extends Config {\n constructor(element, config) {\n super();\n element = getElement(element);\n\n if (!element) {\n return;\n }\n\n this._element = element;\n this._config = this._getConfig(config);\n Data.set(this._element, this.constructor.DATA_KEY, this);\n } // Public\n\n\n dispose() {\n Data.remove(this._element, this.constructor.DATA_KEY);\n EventHandler.off(this._element, this.constructor.EVENT_KEY);\n\n for (const propertyName of Object.getOwnPropertyNames(this)) {\n this[propertyName] = null;\n }\n }\n\n _queueCallback(callback, element, isAnimated = true) {\n executeAfterTransition(callback, element, isAnimated);\n }\n\n _getConfig(config) {\n config = this._mergeConfigObj(config, this._element);\n config = this._configAfterMerge(config);\n\n this._typeCheckConfig(config);\n\n return config;\n } // Static\n\n\n static getInstance(element) {\n return Data.get(getElement(element), this.DATA_KEY);\n }\n\n static getOrCreateInstance(element, config = {}) {\n return this.getInstance(element) || new this(element, typeof config === 'object' ? 
config : null);\n }\n\n static get VERSION() {\n return VERSION;\n }\n\n static get DATA_KEY() {\n return `bs.${this.NAME}`;\n }\n\n static get EVENT_KEY() {\n return `.${this.DATA_KEY}`;\n }\n\n static eventName(name) {\n return `${name}${this.EVENT_KEY}`;\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/component-functions.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst enableDismissTrigger = (component, method = 'hide') => {\n const clickEvent = `click.dismiss${component.EVENT_KEY}`;\n const name = component.NAME;\n EventHandler.on(document, clickEvent, `[data-bs-dismiss=\"${name}\"]`, function (event) {\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n\n if (isDisabled(this)) {\n return;\n }\n\n const target = getElementFromSelector(this) || this.closest(`.${name}`);\n const instance = component.getOrCreateInstance(target); // Method argument is left, for Alert and only, as it doesn't implement the 'hide' method\n\n instance[method]();\n });\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): alert.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$f = 'alert';\nconst DATA_KEY$a = 'bs.alert';\nconst EVENT_KEY$b = `.${DATA_KEY$a}`;\nconst EVENT_CLOSE = `close${EVENT_KEY$b}`;\nconst EVENT_CLOSED = `closed${EVENT_KEY$b}`;\nconst CLASS_NAME_FADE$5 = 'fade';\nconst CLASS_NAME_SHOW$8 = 'show';\n/**\n * Class definition\n */\n\nclass Alert extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$f;\n } // Public\n\n\n close() {\n const closeEvent = EventHandler.trigger(this._element, EVENT_CLOSE);\n\n if (closeEvent.defaultPrevented) {\n return;\n }\n\n this._element.classList.remove(CLASS_NAME_SHOW$8);\n\n const isAnimated = this._element.classList.contains(CLASS_NAME_FADE$5);\n\n this._queueCallback(() => this._destroyElement(), this._element, isAnimated);\n } // Private\n\n\n _destroyElement() {\n this._element.remove();\n\n EventHandler.trigger(this._element, EVENT_CLOSED);\n this.dispose();\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Alert.getOrCreateInstance(this);\n\n if (typeof config !== 'string') {\n return;\n }\n\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config](this);\n });\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nenableDismissTrigger(Alert, 'close');\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Alert);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): button.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$e = 'button';\nconst DATA_KEY$9 = 'bs.button';\nconst EVENT_KEY$a = `.${DATA_KEY$9}`;\nconst DATA_API_KEY$6 = '.data-api';\nconst CLASS_NAME_ACTIVE$3 = 'active';\nconst SELECTOR_DATA_TOGGLE$5 = '[data-bs-toggle=\"button\"]';\nconst EVENT_CLICK_DATA_API$6 = `click${EVENT_KEY$a}${DATA_API_KEY$6}`;\n/**\n * Class definition\n */\n\nclass Button 
extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$e;\n } // Public\n\n\n toggle() {\n // Toggle class and sync the `aria-pressed` attribute with the return value of the `.toggle()` method\n this._element.setAttribute('aria-pressed', this._element.classList.toggle(CLASS_NAME_ACTIVE$3));\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Button.getOrCreateInstance(this);\n\n if (config === 'toggle') {\n data[config]();\n }\n });\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$6, SELECTOR_DATA_TOGGLE$5, event => {\n event.preventDefault();\n const button = event.target.closest(SELECTOR_DATA_TOGGLE$5);\n const data = Button.getOrCreateInstance(button);\n data.toggle();\n});\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Button);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): dom/selector-engine.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst SelectorEngine = {\n find(selector, element = document.documentElement) {\n return [].concat(...Element.prototype.querySelectorAll.call(element, selector));\n },\n\n findOne(selector, element = document.documentElement) {\n return Element.prototype.querySelector.call(element, selector);\n },\n\n children(element, selector) {\n return [].concat(...element.children).filter(child => child.matches(selector));\n },\n\n parents(element, selector) {\n const parents = [];\n let ancestor = element.parentNode.closest(selector);\n\n while (ancestor) {\n parents.push(ancestor);\n ancestor = ancestor.parentNode.closest(selector);\n }\n\n return parents;\n },\n\n prev(element, selector) {\n let previous = element.previousElementSibling;\n\n while (previous) {\n if (previous.matches(selector)) {\n return [previous];\n }\n\n previous = previous.previousElementSibling;\n }\n\n return [];\n },\n\n // TODO: this is now unused; remove later along with prev()\n next(element, selector) {\n let next = element.nextElementSibling;\n\n while (next) {\n if (next.matches(selector)) {\n return [next];\n }\n\n next = next.nextElementSibling;\n }\n\n return [];\n },\n\n focusableChildren(element) {\n const focusables = ['a', 'button', 'input', 'textarea', 'select', 'details', '[tabindex]', '[contenteditable=\"true\"]'].map(selector => `${selector}:not([tabindex^=\"-\"])`).join(',');\n return this.find(focusables, element).filter(el => !isDisabled(el) && isVisible(el));\n }\n\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/swipe.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$d = 'swipe';\nconst EVENT_KEY$9 = '.bs.swipe';\nconst EVENT_TOUCHSTART = `touchstart${EVENT_KEY$9}`;\nconst EVENT_TOUCHMOVE = `touchmove${EVENT_KEY$9}`;\nconst EVENT_TOUCHEND = `touchend${EVENT_KEY$9}`;\nconst EVENT_POINTERDOWN = `pointerdown${EVENT_KEY$9}`;\nconst EVENT_POINTERUP = `pointerup${EVENT_KEY$9}`;\nconst POINTER_TYPE_TOUCH = 'touch';\nconst POINTER_TYPE_PEN = 'pen';\nconst CLASS_NAME_POINTER_EVENT = 'pointer-event';\nconst SWIPE_THRESHOLD = 40;\nconst Default$c = {\n endCallback: null,\n leftCallback: null,\n rightCallback: null\n};\nconst DefaultType$c = 
{\n endCallback: '(function|null)',\n leftCallback: '(function|null)',\n rightCallback: '(function|null)'\n};\n/**\n * Class definition\n */\n\nclass Swipe extends Config {\n constructor(element, config) {\n super();\n this._element = element;\n\n if (!element || !Swipe.isSupported()) {\n return;\n }\n\n this._config = this._getConfig(config);\n this._deltaX = 0;\n this._supportPointerEvents = Boolean(window.PointerEvent);\n\n this._initEvents();\n } // Getters\n\n\n static get Default() {\n return Default$c;\n }\n\n static get DefaultType() {\n return DefaultType$c;\n }\n\n static get NAME() {\n return NAME$d;\n } // Public\n\n\n dispose() {\n EventHandler.off(this._element, EVENT_KEY$9);\n } // Private\n\n\n _start(event) {\n if (!this._supportPointerEvents) {\n this._deltaX = event.touches[0].clientX;\n return;\n }\n\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX;\n }\n }\n\n _end(event) {\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX - this._deltaX;\n }\n\n this._handleSwipe();\n\n execute(this._config.endCallback);\n }\n\n _move(event) {\n this._deltaX = event.touches && event.touches.length > 1 ? 0 : event.touches[0].clientX - this._deltaX;\n }\n\n _handleSwipe() {\n const absDeltaX = Math.abs(this._deltaX);\n\n if (absDeltaX <= SWIPE_THRESHOLD) {\n return;\n }\n\n const direction = absDeltaX / this._deltaX;\n this._deltaX = 0;\n\n if (!direction) {\n return;\n }\n\n execute(direction > 0 ? this._config.rightCallback : this._config.leftCallback);\n }\n\n _initEvents() {\n if (this._supportPointerEvents) {\n EventHandler.on(this._element, EVENT_POINTERDOWN, event => this._start(event));\n EventHandler.on(this._element, EVENT_POINTERUP, event => this._end(event));\n\n this._element.classList.add(CLASS_NAME_POINTER_EVENT);\n } else {\n EventHandler.on(this._element, EVENT_TOUCHSTART, event => this._start(event));\n EventHandler.on(this._element, EVENT_TOUCHMOVE, event => this._move(event));\n EventHandler.on(this._element, EVENT_TOUCHEND, event => this._end(event));\n }\n }\n\n _eventIsPointerPenTouch(event) {\n return this._supportPointerEvents && (event.pointerType === POINTER_TYPE_PEN || event.pointerType === POINTER_TYPE_TOUCH);\n } // Static\n\n\n static isSupported() {\n return 'ontouchstart' in document.documentElement || navigator.maxTouchPoints > 0;\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): carousel.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$c = 'carousel';\nconst DATA_KEY$8 = 'bs.carousel';\nconst EVENT_KEY$8 = `.${DATA_KEY$8}`;\nconst DATA_API_KEY$5 = '.data-api';\nconst ARROW_LEFT_KEY$1 = 'ArrowLeft';\nconst ARROW_RIGHT_KEY$1 = 'ArrowRight';\nconst TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to fire after touch\n\nconst ORDER_NEXT = 'next';\nconst ORDER_PREV = 'prev';\nconst DIRECTION_LEFT = 'left';\nconst DIRECTION_RIGHT = 'right';\nconst EVENT_SLIDE = `slide${EVENT_KEY$8}`;\nconst EVENT_SLID = `slid${EVENT_KEY$8}`;\nconst EVENT_KEYDOWN$1 = `keydown${EVENT_KEY$8}`;\nconst EVENT_MOUSEENTER$1 = `mouseenter${EVENT_KEY$8}`;\nconst EVENT_MOUSELEAVE$1 = `mouseleave${EVENT_KEY$8}`;\nconst EVENT_DRAG_START = `dragstart${EVENT_KEY$8}`;\nconst EVENT_LOAD_DATA_API$3 = `load${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst EVENT_CLICK_DATA_API$5 = 
`click${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst CLASS_NAME_CAROUSEL = 'carousel';\nconst CLASS_NAME_ACTIVE$2 = 'active';\nconst CLASS_NAME_SLIDE = 'slide';\nconst CLASS_NAME_END = 'carousel-item-end';\nconst CLASS_NAME_START = 'carousel-item-start';\nconst CLASS_NAME_NEXT = 'carousel-item-next';\nconst CLASS_NAME_PREV = 'carousel-item-prev';\nconst SELECTOR_ACTIVE = '.active';\nconst SELECTOR_ITEM = '.carousel-item';\nconst SELECTOR_ACTIVE_ITEM = SELECTOR_ACTIVE + SELECTOR_ITEM;\nconst SELECTOR_ITEM_IMG = '.carousel-item img';\nconst SELECTOR_INDICATORS = '.carousel-indicators';\nconst SELECTOR_DATA_SLIDE = '[data-bs-slide], [data-bs-slide-to]';\nconst SELECTOR_DATA_RIDE = '[data-bs-ride=\"carousel\"]';\nconst KEY_TO_DIRECTION = {\n [ARROW_LEFT_KEY$1]: DIRECTION_RIGHT,\n [ARROW_RIGHT_KEY$1]: DIRECTION_LEFT\n};\nconst Default$b = {\n interval: 5000,\n keyboard: true,\n pause: 'hover',\n ride: false,\n touch: true,\n wrap: true\n};\nconst DefaultType$b = {\n interval: '(number|boolean)',\n // TODO:v6 remove boolean support\n keyboard: 'boolean',\n pause: '(string|boolean)',\n ride: '(boolean|string)',\n touch: 'boolean',\n wrap: 'boolean'\n};\n/**\n * Class definition\n */\n\nclass Carousel extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._interval = null;\n this._activeElement = null;\n this._isSliding = false;\n this.touchTimeout = null;\n this._swipeHelper = null;\n this._indicatorsElement = SelectorEngine.findOne(SELECTOR_INDICATORS, this._element);\n\n this._addEventListeners();\n\n if (this._config.ride === CLASS_NAME_CAROUSEL) {\n this.cycle();\n }\n } // Getters\n\n\n static get Default() {\n return Default$b;\n }\n\n static get DefaultType() {\n return DefaultType$b;\n }\n\n static get NAME() {\n return NAME$c;\n } // Public\n\n\n next() {\n this._slide(ORDER_NEXT);\n }\n\n nextWhenVisible() {\n // FIXME TODO use `document.visibilityState`\n // Don't call next when the page isn't visible\n // or the carousel or its parent isn't visible\n if (!document.hidden && isVisible(this._element)) {\n this.next();\n }\n }\n\n prev() {\n this._slide(ORDER_PREV);\n }\n\n pause() {\n if (this._isSliding) {\n triggerTransitionEnd(this._element);\n }\n\n this._clearInterval();\n }\n\n cycle() {\n this._clearInterval();\n\n this._updateInterval();\n\n this._interval = setInterval(() => this.nextWhenVisible(), this._config.interval);\n }\n\n _maybeEnableCycle() {\n if (!this._config.ride) {\n return;\n }\n\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.cycle());\n return;\n }\n\n this.cycle();\n }\n\n to(index) {\n const items = this._getItems();\n\n if (index > items.length - 1 || index < 0) {\n return;\n }\n\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.to(index));\n return;\n }\n\n const activeIndex = this._getItemIndex(this._getActive());\n\n if (activeIndex === index) {\n return;\n }\n\n const order = index > activeIndex ? 
ORDER_NEXT : ORDER_PREV;\n\n this._slide(order, items[index]);\n }\n\n dispose() {\n if (this._swipeHelper) {\n this._swipeHelper.dispose();\n }\n\n super.dispose();\n } // Private\n\n\n _configAfterMerge(config) {\n config.defaultInterval = config.interval;\n return config;\n }\n\n _addEventListeners() {\n if (this._config.keyboard) {\n EventHandler.on(this._element, EVENT_KEYDOWN$1, event => this._keydown(event));\n }\n\n if (this._config.pause === 'hover') {\n EventHandler.on(this._element, EVENT_MOUSEENTER$1, () => this.pause());\n EventHandler.on(this._element, EVENT_MOUSELEAVE$1, () => this._maybeEnableCycle());\n }\n\n if (this._config.touch && Swipe.isSupported()) {\n this._addTouchEventListeners();\n }\n }\n\n _addTouchEventListeners() {\n for (const img of SelectorEngine.find(SELECTOR_ITEM_IMG, this._element)) {\n EventHandler.on(img, EVENT_DRAG_START, event => event.preventDefault());\n }\n\n const endCallBack = () => {\n if (this._config.pause !== 'hover') {\n return;\n } // If it's a touch-enabled device, mouseenter/leave are fired as\n // part of the mouse compatibility events on first tap - the carousel\n // would stop cycling until user tapped out of it;\n // here, we listen for touchend, explicitly pause the carousel\n // (as if it's the second time we tap on it, mouseenter compat event\n // is NOT fired) and after a timeout (to allow for mouse compatibility\n // events to fire) we explicitly restart cycling\n\n\n this.pause();\n\n if (this.touchTimeout) {\n clearTimeout(this.touchTimeout);\n }\n\n this.touchTimeout = setTimeout(() => this._maybeEnableCycle(), TOUCHEVENT_COMPAT_WAIT + this._config.interval);\n };\n\n const swipeConfig = {\n leftCallback: () => this._slide(this._directionToOrder(DIRECTION_LEFT)),\n rightCallback: () => this._slide(this._directionToOrder(DIRECTION_RIGHT)),\n endCallback: endCallBack\n };\n this._swipeHelper = new Swipe(this._element, swipeConfig);\n }\n\n _keydown(event) {\n if (/input|textarea/i.test(event.target.tagName)) {\n return;\n }\n\n const direction = KEY_TO_DIRECTION[event.key];\n\n if (direction) {\n event.preventDefault();\n\n this._slide(this._directionToOrder(direction));\n }\n }\n\n _getItemIndex(element) {\n return this._getItems().indexOf(element);\n }\n\n _setActiveIndicatorElement(index) {\n if (!this._indicatorsElement) {\n return;\n }\n\n const activeIndicator = SelectorEngine.findOne(SELECTOR_ACTIVE, this._indicatorsElement);\n activeIndicator.classList.remove(CLASS_NAME_ACTIVE$2);\n activeIndicator.removeAttribute('aria-current');\n const newActiveIndicator = SelectorEngine.findOne(`[data-bs-slide-to=\"${index}\"]`, this._indicatorsElement);\n\n if (newActiveIndicator) {\n newActiveIndicator.classList.add(CLASS_NAME_ACTIVE$2);\n newActiveIndicator.setAttribute('aria-current', 'true');\n }\n }\n\n _updateInterval() {\n const element = this._activeElement || this._getActive();\n\n if (!element) {\n return;\n }\n\n const elementInterval = Number.parseInt(element.getAttribute('data-bs-interval'), 10);\n this._config.interval = elementInterval || this._config.defaultInterval;\n }\n\n _slide(order, element = null) {\n if (this._isSliding) {\n return;\n }\n\n const activeElement = this._getActive();\n\n const isNext = order === ORDER_NEXT;\n const nextElement = element || getNextActiveElement(this._getItems(), activeElement, isNext, this._config.wrap);\n\n if (nextElement === activeElement) {\n return;\n }\n\n const nextElementIndex = this._getItemIndex(nextElement);\n\n const triggerEvent = eventName => {\n return 
EventHandler.trigger(this._element, eventName, {\n relatedTarget: nextElement,\n direction: this._orderToDirection(order),\n from: this._getItemIndex(activeElement),\n to: nextElementIndex\n });\n };\n\n const slideEvent = triggerEvent(EVENT_SLIDE);\n\n if (slideEvent.defaultPrevented) {\n return;\n }\n\n if (!activeElement || !nextElement) {\n // Some weirdness is happening, so we bail\n // todo: change tests that use empty divs to avoid this check\n return;\n }\n\n const isCycling = Boolean(this._interval);\n this.pause();\n this._isSliding = true;\n\n this._setActiveIndicatorElement(nextElementIndex);\n\n this._activeElement = nextElement;\n const directionalClassName = isNext ? CLASS_NAME_START : CLASS_NAME_END;\n const orderClassName = isNext ? CLASS_NAME_NEXT : CLASS_NAME_PREV;\n nextElement.classList.add(orderClassName);\n reflow(nextElement);\n activeElement.classList.add(directionalClassName);\n nextElement.classList.add(directionalClassName);\n\n const completeCallBack = () => {\n nextElement.classList.remove(directionalClassName, orderClassName);\n nextElement.classList.add(CLASS_NAME_ACTIVE$2);\n activeElement.classList.remove(CLASS_NAME_ACTIVE$2, orderClassName, directionalClassName);\n this._isSliding = false;\n triggerEvent(EVENT_SLID);\n };\n\n this._queueCallback(completeCallBack, activeElement, this._isAnimated());\n\n if (isCycling) {\n this.cycle();\n }\n }\n\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_SLIDE);\n }\n\n _getActive() {\n return SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);\n }\n\n _getItems() {\n return SelectorEngine.find(SELECTOR_ITEM, this._element);\n }\n\n _clearInterval() {\n if (this._interval) {\n clearInterval(this._interval);\n this._interval = null;\n }\n }\n\n _directionToOrder(direction) {\n if (isRTL()) {\n return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT;\n }\n\n return direction === DIRECTION_LEFT ? ORDER_NEXT : ORDER_PREV;\n }\n\n _orderToDirection(order) {\n if (isRTL()) {\n return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT;\n }\n\n return order === ORDER_PREV ? 
DIRECTION_RIGHT : DIRECTION_LEFT;\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Carousel.getOrCreateInstance(this, config);\n\n if (typeof config === 'number') {\n data.to(config);\n return;\n }\n\n if (typeof config === 'string') {\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config]();\n }\n });\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$5, SELECTOR_DATA_SLIDE, function (event) {\n const target = getElementFromSelector(this);\n\n if (!target || !target.classList.contains(CLASS_NAME_CAROUSEL)) {\n return;\n }\n\n event.preventDefault();\n const carousel = Carousel.getOrCreateInstance(target);\n const slideIndex = this.getAttribute('data-bs-slide-to');\n\n if (slideIndex) {\n carousel.to(slideIndex);\n\n carousel._maybeEnableCycle();\n\n return;\n }\n\n if (Manipulator.getDataAttribute(this, 'slide') === 'next') {\n carousel.next();\n\n carousel._maybeEnableCycle();\n\n return;\n }\n\n carousel.prev();\n\n carousel._maybeEnableCycle();\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$3, () => {\n const carousels = SelectorEngine.find(SELECTOR_DATA_RIDE);\n\n for (const carousel of carousels) {\n Carousel.getOrCreateInstance(carousel);\n }\n});\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Carousel);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): collapse.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$b = 'collapse';\nconst DATA_KEY$7 = 'bs.collapse';\nconst EVENT_KEY$7 = `.${DATA_KEY$7}`;\nconst DATA_API_KEY$4 = '.data-api';\nconst EVENT_SHOW$6 = `show${EVENT_KEY$7}`;\nconst EVENT_SHOWN$6 = `shown${EVENT_KEY$7}`;\nconst EVENT_HIDE$6 = `hide${EVENT_KEY$7}`;\nconst EVENT_HIDDEN$6 = `hidden${EVENT_KEY$7}`;\nconst EVENT_CLICK_DATA_API$4 = `click${EVENT_KEY$7}${DATA_API_KEY$4}`;\nconst CLASS_NAME_SHOW$7 = 'show';\nconst CLASS_NAME_COLLAPSE = 'collapse';\nconst CLASS_NAME_COLLAPSING = 'collapsing';\nconst CLASS_NAME_COLLAPSED = 'collapsed';\nconst CLASS_NAME_DEEPER_CHILDREN = `:scope .${CLASS_NAME_COLLAPSE} .${CLASS_NAME_COLLAPSE}`;\nconst CLASS_NAME_HORIZONTAL = 'collapse-horizontal';\nconst WIDTH = 'width';\nconst HEIGHT = 'height';\nconst SELECTOR_ACTIVES = '.collapse.show, .collapse.collapsing';\nconst SELECTOR_DATA_TOGGLE$4 = '[data-bs-toggle=\"collapse\"]';\nconst Default$a = {\n parent: null,\n toggle: true\n};\nconst DefaultType$a = {\n parent: '(null|element)',\n toggle: 'boolean'\n};\n/**\n * Class definition\n */\n\nclass Collapse extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isTransitioning = false;\n this._triggerArray = [];\n const toggleList = SelectorEngine.find(SELECTOR_DATA_TOGGLE$4);\n\n for (const elem of toggleList) {\n const selector = getSelectorFromElement(elem);\n const filterElement = SelectorEngine.find(selector).filter(foundElement => foundElement === this._element);\n\n if (selector !== null && filterElement.length) {\n this._triggerArray.push(elem);\n }\n }\n\n this._initializeChildren();\n\n if (!this._config.parent) {\n this._addAriaAndCollapsedClass(this._triggerArray, this._isShown());\n }\n\n if (this._config.toggle) {\n this.toggle();\n }\n } // Getters\n\n\n static get Default() {\n 
return Default$a;\n }\n\n static get DefaultType() {\n return DefaultType$a;\n }\n\n static get NAME() {\n return NAME$b;\n } // Public\n\n\n toggle() {\n if (this._isShown()) {\n this.hide();\n } else {\n this.show();\n }\n }\n\n show() {\n if (this._isTransitioning || this._isShown()) {\n return;\n }\n\n let activeChildren = []; // find active children\n\n if (this._config.parent) {\n activeChildren = this._getFirstLevelChildren(SELECTOR_ACTIVES).filter(element => element !== this._element).map(element => Collapse.getOrCreateInstance(element, {\n toggle: false\n }));\n }\n\n if (activeChildren.length && activeChildren[0]._isTransitioning) {\n return;\n }\n\n const startEvent = EventHandler.trigger(this._element, EVENT_SHOW$6);\n\n if (startEvent.defaultPrevented) {\n return;\n }\n\n for (const activeInstance of activeChildren) {\n activeInstance.hide();\n }\n\n const dimension = this._getDimension();\n\n this._element.classList.remove(CLASS_NAME_COLLAPSE);\n\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n\n this._element.style[dimension] = 0;\n\n this._addAriaAndCollapsedClass(this._triggerArray, true);\n\n this._isTransitioning = true;\n\n const complete = () => {\n this._isTransitioning = false;\n\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n\n this._element.classList.add(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n\n this._element.style[dimension] = '';\n EventHandler.trigger(this._element, EVENT_SHOWN$6);\n };\n\n const capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);\n const scrollSize = `scroll${capitalizedDimension}`;\n\n this._queueCallback(complete, this._element, true);\n\n this._element.style[dimension] = `${this._element[scrollSize]}px`;\n }\n\n hide() {\n if (this._isTransitioning || !this._isShown()) {\n return;\n }\n\n const startEvent = EventHandler.trigger(this._element, EVENT_HIDE$6);\n\n if (startEvent.defaultPrevented) {\n return;\n }\n\n const dimension = this._getDimension();\n\n this._element.style[dimension] = `${this._element.getBoundingClientRect()[dimension]}px`;\n reflow(this._element);\n\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n\n this._element.classList.remove(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n\n for (const trigger of this._triggerArray) {\n const element = getElementFromSelector(trigger);\n\n if (element && !this._isShown(element)) {\n this._addAriaAndCollapsedClass([trigger], false);\n }\n }\n\n this._isTransitioning = true;\n\n const complete = () => {\n this._isTransitioning = false;\n\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n\n this._element.classList.add(CLASS_NAME_COLLAPSE);\n\n EventHandler.trigger(this._element, EVENT_HIDDEN$6);\n };\n\n this._element.style[dimension] = '';\n\n this._queueCallback(complete, this._element, true);\n }\n\n _isShown(element = this._element) {\n return element.classList.contains(CLASS_NAME_SHOW$7);\n } // Private\n\n\n _configAfterMerge(config) {\n config.toggle = Boolean(config.toggle); // Coerce string values\n\n config.parent = getElement(config.parent);\n return config;\n }\n\n _getDimension() {\n return this._element.classList.contains(CLASS_NAME_HORIZONTAL) ? 
WIDTH : HEIGHT;\n }\n\n _initializeChildren() {\n if (!this._config.parent) {\n return;\n }\n\n const children = this._getFirstLevelChildren(SELECTOR_DATA_TOGGLE$4);\n\n for (const element of children) {\n const selected = getElementFromSelector(element);\n\n if (selected) {\n this._addAriaAndCollapsedClass([element], this._isShown(selected));\n }\n }\n }\n\n _getFirstLevelChildren(selector) {\n const children = SelectorEngine.find(CLASS_NAME_DEEPER_CHILDREN, this._config.parent); // remove children if greater depth\n\n return SelectorEngine.find(selector, this._config.parent).filter(element => !children.includes(element));\n }\n\n _addAriaAndCollapsedClass(triggerArray, isOpen) {\n if (!triggerArray.length) {\n return;\n }\n\n for (const element of triggerArray) {\n element.classList.toggle(CLASS_NAME_COLLAPSED, !isOpen);\n element.setAttribute('aria-expanded', isOpen);\n }\n } // Static\n\n\n static jQueryInterface(config) {\n const _config = {};\n\n if (typeof config === 'string' && /show|hide/.test(config)) {\n _config.toggle = false;\n }\n\n return this.each(function () {\n const data = Collapse.getOrCreateInstance(this, _config);\n\n if (typeof config === 'string') {\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config]();\n }\n });\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$4, SELECTOR_DATA_TOGGLE$4, function (event) {\n // preventDefault only for elements (which change the URL) not inside the collapsible element\n if (event.target.tagName === 'A' || event.delegateTarget && event.delegateTarget.tagName === 'A') {\n event.preventDefault();\n }\n\n const selector = getSelectorFromElement(this);\n const selectorElements = SelectorEngine.find(selector);\n\n for (const element of selectorElements) {\n Collapse.getOrCreateInstance(element, {\n toggle: false\n }).toggle();\n }\n});\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Collapse);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): dropdown.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$a = 'dropdown';\nconst DATA_KEY$6 = 'bs.dropdown';\nconst EVENT_KEY$6 = `.${DATA_KEY$6}`;\nconst DATA_API_KEY$3 = '.data-api';\nconst ESCAPE_KEY$2 = 'Escape';\nconst TAB_KEY$1 = 'Tab';\nconst ARROW_UP_KEY$1 = 'ArrowUp';\nconst ARROW_DOWN_KEY$1 = 'ArrowDown';\nconst RIGHT_MOUSE_BUTTON = 2; // MouseEvent.button value for the secondary button, usually the right button\n\nconst EVENT_HIDE$5 = `hide${EVENT_KEY$6}`;\nconst EVENT_HIDDEN$5 = `hidden${EVENT_KEY$6}`;\nconst EVENT_SHOW$5 = `show${EVENT_KEY$6}`;\nconst EVENT_SHOWN$5 = `shown${EVENT_KEY$6}`;\nconst EVENT_CLICK_DATA_API$3 = `click${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYDOWN_DATA_API = `keydown${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYUP_DATA_API = `keyup${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst CLASS_NAME_SHOW$6 = 'show';\nconst CLASS_NAME_DROPUP = 'dropup';\nconst CLASS_NAME_DROPEND = 'dropend';\nconst CLASS_NAME_DROPSTART = 'dropstart';\nconst CLASS_NAME_DROPUP_CENTER = 'dropup-center';\nconst CLASS_NAME_DROPDOWN_CENTER = 'dropdown-center';\nconst SELECTOR_DATA_TOGGLE$3 = '[data-bs-toggle=\"dropdown\"]:not(.disabled):not(:disabled)';\nconst SELECTOR_DATA_TOGGLE_SHOWN = `${SELECTOR_DATA_TOGGLE$3}.${CLASS_NAME_SHOW$6}`;\nconst SELECTOR_MENU = 
'.dropdown-menu';\nconst SELECTOR_NAVBAR = '.navbar';\nconst SELECTOR_NAVBAR_NAV = '.navbar-nav';\nconst SELECTOR_VISIBLE_ITEMS = '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)';\nconst PLACEMENT_TOP = isRTL() ? 'top-end' : 'top-start';\nconst PLACEMENT_TOPEND = isRTL() ? 'top-start' : 'top-end';\nconst PLACEMENT_BOTTOM = isRTL() ? 'bottom-end' : 'bottom-start';\nconst PLACEMENT_BOTTOMEND = isRTL() ? 'bottom-start' : 'bottom-end';\nconst PLACEMENT_RIGHT = isRTL() ? 'left-start' : 'right-start';\nconst PLACEMENT_LEFT = isRTL() ? 'right-start' : 'left-start';\nconst PLACEMENT_TOPCENTER = 'top';\nconst PLACEMENT_BOTTOMCENTER = 'bottom';\nconst Default$9 = {\n autoClose: true,\n boundary: 'clippingParents',\n display: 'dynamic',\n offset: [0, 2],\n popperConfig: null,\n reference: 'toggle'\n};\nconst DefaultType$9 = {\n autoClose: '(boolean|string)',\n boundary: '(string|element)',\n display: 'string',\n offset: '(array|string|function)',\n popperConfig: '(null|object|function)',\n reference: '(string|element|object)'\n};\n/**\n * Class definition\n */\n\nclass Dropdown extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._popper = null;\n this._parent = this._element.parentNode; // dropdown wrapper\n // todo: v6 revert #37011 & change markup https://getbootstrap.com/docs/5.2/forms/input-group/\n\n this._menu = SelectorEngine.next(this._element, SELECTOR_MENU)[0] || SelectorEngine.prev(this._element, SELECTOR_MENU)[0] || SelectorEngine.findOne(SELECTOR_MENU, this._parent);\n this._inNavbar = this._detectNavbar();\n } // Getters\n\n\n static get Default() {\n return Default$9;\n }\n\n static get DefaultType() {\n return DefaultType$9;\n }\n\n static get NAME() {\n return NAME$a;\n } // Public\n\n\n toggle() {\n return this._isShown() ? 
this.hide() : this.show();\n }\n\n show() {\n if (isDisabled(this._element) || this._isShown()) {\n return;\n }\n\n const relatedTarget = {\n relatedTarget: this._element\n };\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$5, relatedTarget);\n\n if (showEvent.defaultPrevented) {\n return;\n }\n\n this._createPopper(); // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n\n\n if ('ontouchstart' in document.documentElement && !this._parent.closest(SELECTOR_NAVBAR_NAV)) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n\n this._element.focus();\n\n this._element.setAttribute('aria-expanded', true);\n\n this._menu.classList.add(CLASS_NAME_SHOW$6);\n\n this._element.classList.add(CLASS_NAME_SHOW$6);\n\n EventHandler.trigger(this._element, EVENT_SHOWN$5, relatedTarget);\n }\n\n hide() {\n if (isDisabled(this._element) || !this._isShown()) {\n return;\n }\n\n const relatedTarget = {\n relatedTarget: this._element\n };\n\n this._completeHide(relatedTarget);\n }\n\n dispose() {\n if (this._popper) {\n this._popper.destroy();\n }\n\n super.dispose();\n }\n\n update() {\n this._inNavbar = this._detectNavbar();\n\n if (this._popper) {\n this._popper.update();\n }\n } // Private\n\n\n _completeHide(relatedTarget) {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$5, relatedTarget);\n\n if (hideEvent.defaultPrevented) {\n return;\n } // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n\n\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n\n if (this._popper) {\n this._popper.destroy();\n }\n\n this._menu.classList.remove(CLASS_NAME_SHOW$6);\n\n this._element.classList.remove(CLASS_NAME_SHOW$6);\n\n this._element.setAttribute('aria-expanded', 'false');\n\n Manipulator.removeDataAttribute(this._menu, 'popper');\n EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget);\n }\n\n _getConfig(config) {\n config = super._getConfig(config);\n\n if (typeof config.reference === 'object' && !isElement(config.reference) && typeof config.reference.getBoundingClientRect !== 'function') {\n // Popper virtual elements require a getBoundingClientRect method\n throw new TypeError(`${NAME$a.toUpperCase()}: Option \"reference\" provided type \"object\" without a required \"getBoundingClientRect\" method.`);\n }\n\n return config;\n }\n\n _createPopper() {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s dropdowns require Popper (https://popper.js.org)');\n }\n\n let referenceElement = this._element;\n\n if (this._config.reference === 'parent') {\n referenceElement = this._parent;\n } else if (isElement(this._config.reference)) {\n referenceElement = getElement(this._config.reference);\n } else if (typeof this._config.reference === 'object') {\n referenceElement = this._config.reference;\n }\n\n const popperConfig = this._getPopperConfig();\n\n this._popper = Popper.createPopper(referenceElement, this._menu, popperConfig);\n }\n\n _isShown() {\n return this._menu.classList.contains(CLASS_NAME_SHOW$6);\n }\n\n _getPlacement() {\n const parentDropdown = this._parent;\n\n if 
(parentDropdown.classList.contains(CLASS_NAME_DROPEND)) {\n return PLACEMENT_RIGHT;\n }\n\n if (parentDropdown.classList.contains(CLASS_NAME_DROPSTART)) {\n return PLACEMENT_LEFT;\n }\n\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP_CENTER)) {\n return PLACEMENT_TOPCENTER;\n }\n\n if (parentDropdown.classList.contains(CLASS_NAME_DROPDOWN_CENTER)) {\n return PLACEMENT_BOTTOMCENTER;\n } // We need to trim the value because custom properties can also include spaces\n\n\n const isEnd = getComputedStyle(this._menu).getPropertyValue('--bs-position').trim() === 'end';\n\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP)) {\n return isEnd ? PLACEMENT_TOPEND : PLACEMENT_TOP;\n }\n\n return isEnd ? PLACEMENT_BOTTOMEND : PLACEMENT_BOTTOM;\n }\n\n _detectNavbar() {\n return this._element.closest(SELECTOR_NAVBAR) !== null;\n }\n\n _getOffset() {\n const {\n offset\n } = this._config;\n\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n\n return offset;\n }\n\n _getPopperConfig() {\n const defaultBsPopperConfig = {\n placement: this._getPlacement(),\n modifiers: [{\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }]\n }; // Disable Popper if we have a static display or Dropdown is in Navbar\n\n if (this._inNavbar || this._config.display === 'static') {\n Manipulator.setDataAttribute(this._menu, 'popper', 'static'); // todo:v6 remove\n\n defaultBsPopperConfig.modifiers = [{\n name: 'applyStyles',\n enabled: false\n }];\n }\n\n return { ...defaultBsPopperConfig,\n ...(typeof this._config.popperConfig === 'function' ? this._config.popperConfig(defaultBsPopperConfig) : this._config.popperConfig)\n };\n }\n\n _selectMenuItem({\n key,\n target\n }) {\n const items = SelectorEngine.find(SELECTOR_VISIBLE_ITEMS, this._menu).filter(element => isVisible(element));\n\n if (!items.length) {\n return;\n } // if target isn't included in items (e.g. 
when expanding the dropdown)\n // allow cycling to get the last item in case key equals ARROW_UP_KEY\n\n\n getNextActiveElement(items, target, key === ARROW_DOWN_KEY$1, !items.includes(target)).focus();\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Dropdown.getOrCreateInstance(this, config);\n\n if (typeof config !== 'string') {\n return;\n }\n\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config]();\n });\n }\n\n static clearMenus(event) {\n if (event.button === RIGHT_MOUSE_BUTTON || event.type === 'keyup' && event.key !== TAB_KEY$1) {\n return;\n }\n\n const openToggles = SelectorEngine.find(SELECTOR_DATA_TOGGLE_SHOWN);\n\n for (const toggle of openToggles) {\n const context = Dropdown.getInstance(toggle);\n\n if (!context || context._config.autoClose === false) {\n continue;\n }\n\n const composedPath = event.composedPath();\n const isMenuTarget = composedPath.includes(context._menu);\n\n if (composedPath.includes(context._element) || context._config.autoClose === 'inside' && !isMenuTarget || context._config.autoClose === 'outside' && isMenuTarget) {\n continue;\n } // Tab navigation through the dropdown menu or events from contained inputs shouldn't close the menu\n\n\n if (context._menu.contains(event.target) && (event.type === 'keyup' && event.key === TAB_KEY$1 || /input|select|option|textarea|form/i.test(event.target.tagName))) {\n continue;\n }\n\n const relatedTarget = {\n relatedTarget: context._element\n };\n\n if (event.type === 'click') {\n relatedTarget.clickEvent = event;\n }\n\n context._completeHide(relatedTarget);\n }\n }\n\n static dataApiKeydownHandler(event) {\n // If not an UP | DOWN | ESCAPE key => not a dropdown command\n // If input/textarea && if key is other than ESCAPE => not a dropdown command\n const isInput = /input|textarea/i.test(event.target.tagName);\n const isEscapeEvent = event.key === ESCAPE_KEY$2;\n const isUpOrDownEvent = [ARROW_UP_KEY$1, ARROW_DOWN_KEY$1].includes(event.key);\n\n if (!isUpOrDownEvent && !isEscapeEvent) {\n return;\n }\n\n if (isInput && !isEscapeEvent) {\n return;\n }\n\n event.preventDefault(); // todo: v6 revert #37011 & change markup https://getbootstrap.com/docs/5.2/forms/input-group/\n\n const getToggleButton = this.matches(SELECTOR_DATA_TOGGLE$3) ? 
this : SelectorEngine.prev(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.next(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.findOne(SELECTOR_DATA_TOGGLE$3, event.delegateTarget.parentNode);\n const instance = Dropdown.getOrCreateInstance(getToggleButton);\n\n if (isUpOrDownEvent) {\n event.stopPropagation();\n instance.show();\n\n instance._selectMenuItem(event);\n\n return;\n }\n\n if (instance._isShown()) {\n // else is escape and we check if it is shown\n event.stopPropagation();\n instance.hide();\n getToggleButton.focus();\n }\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_DATA_TOGGLE$3, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_MENU, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_KEYUP_DATA_API, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, SELECTOR_DATA_TOGGLE$3, function (event) {\n event.preventDefault();\n Dropdown.getOrCreateInstance(this).toggle();\n});\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Dropdown);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/scrollBar.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst SELECTOR_FIXED_CONTENT = '.fixed-top, .fixed-bottom, .is-fixed, .sticky-top';\nconst SELECTOR_STICKY_CONTENT = '.sticky-top';\nconst PROPERTY_PADDING = 'padding-right';\nconst PROPERTY_MARGIN = 'margin-right';\n/**\n * Class definition\n */\n\nclass ScrollBarHelper {\n constructor() {\n this._element = document.body;\n } // Public\n\n\n getWidth() {\n // https://developer.mozilla.org/en-US/docs/Web/API/Window/innerWidth#usage_notes\n const documentWidth = document.documentElement.clientWidth;\n return Math.abs(window.innerWidth - documentWidth);\n }\n\n hide() {\n const width = this.getWidth();\n\n this._disableOverFlow(); // give padding to element to balance the hidden scrollbar width\n\n\n this._setElementAttributes(this._element, PROPERTY_PADDING, calculatedValue => calculatedValue + width); // trick: We adjust positive paddingRight and negative marginRight to sticky-top elements to keep showing fullwidth\n\n\n this._setElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n\n this._setElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN, calculatedValue => calculatedValue - width);\n }\n\n reset() {\n this._resetElementAttributes(this._element, 'overflow');\n\n this._resetElementAttributes(this._element, PROPERTY_PADDING);\n\n this._resetElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING);\n\n this._resetElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN);\n }\n\n isOverflowing() {\n return this.getWidth() > 0;\n } // Private\n\n\n _disableOverFlow() {\n this._saveInitialAttribute(this._element, 'overflow');\n\n this._element.style.overflow = 'hidden';\n }\n\n _setElementAttributes(selector, styleProperty, callback) {\n const scrollbarWidth = this.getWidth();\n\n const manipulationCallBack = element => {\n if (element !== this._element && window.innerWidth > element.clientWidth + scrollbarWidth) {\n return;\n }\n\n this._saveInitialAttribute(element, styleProperty);\n\n const calculatedValue = 
window.getComputedStyle(element).getPropertyValue(styleProperty);\n element.style.setProperty(styleProperty, `${callback(Number.parseFloat(calculatedValue))}px`);\n };\n\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n\n _saveInitialAttribute(element, styleProperty) {\n const actualValue = element.style.getPropertyValue(styleProperty);\n\n if (actualValue) {\n Manipulator.setDataAttribute(element, styleProperty, actualValue);\n }\n }\n\n _resetElementAttributes(selector, styleProperty) {\n const manipulationCallBack = element => {\n const value = Manipulator.getDataAttribute(element, styleProperty); // We only want to remove the property if the value is `null`; the value can also be zero\n\n if (value === null) {\n element.style.removeProperty(styleProperty);\n return;\n }\n\n Manipulator.removeDataAttribute(element, styleProperty);\n element.style.setProperty(styleProperty, value);\n };\n\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n\n _applyManipulationCallback(selector, callBack) {\n if (isElement(selector)) {\n callBack(selector);\n return;\n }\n\n for (const sel of SelectorEngine.find(selector, this._element)) {\n callBack(sel);\n }\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/backdrop.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$9 = 'backdrop';\nconst CLASS_NAME_FADE$4 = 'fade';\nconst CLASS_NAME_SHOW$5 = 'show';\nconst EVENT_MOUSEDOWN = `mousedown.bs.${NAME$9}`;\nconst Default$8 = {\n className: 'modal-backdrop',\n clickCallback: null,\n isAnimated: false,\n isVisible: true,\n // if false, we use the backdrop helper without adding any element to the dom\n rootElement: 'body' // give the choice to place backdrop under different elements\n\n};\nconst DefaultType$8 = {\n className: 'string',\n clickCallback: '(function|null)',\n isAnimated: 'boolean',\n isVisible: 'boolean',\n rootElement: '(element|string)'\n};\n/**\n * Class definition\n */\n\nclass Backdrop extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isAppended = false;\n this._element = null;\n } // Getters\n\n\n static get Default() {\n return Default$8;\n }\n\n static get DefaultType() {\n return DefaultType$8;\n }\n\n static get NAME() {\n return NAME$9;\n } // Public\n\n\n show(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n\n this._append();\n\n const element = this._getElement();\n\n if (this._config.isAnimated) {\n reflow(element);\n }\n\n element.classList.add(CLASS_NAME_SHOW$5);\n\n this._emulateAnimation(() => {\n execute(callback);\n });\n }\n\n hide(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n\n this._getElement().classList.remove(CLASS_NAME_SHOW$5);\n\n this._emulateAnimation(() => {\n this.dispose();\n execute(callback);\n });\n }\n\n dispose() {\n if (!this._isAppended) {\n return;\n }\n\n EventHandler.off(this._element, EVENT_MOUSEDOWN);\n\n this._element.remove();\n\n this._isAppended = false;\n } // Private\n\n\n _getElement() {\n if (!this._element) {\n const backdrop = document.createElement('div');\n backdrop.className = this._config.className;\n\n if (this._config.isAnimated) {\n backdrop.classList.add(CLASS_NAME_FADE$4);\n }\n\n this._element = backdrop;\n }\n\n return this._element;\n }\n\n 
_configAfterMerge(config) {\n // use getElement() with the default \"body\" to get a fresh Element on each instantiation\n config.rootElement = getElement(config.rootElement);\n return config;\n }\n\n _append() {\n if (this._isAppended) {\n return;\n }\n\n const element = this._getElement();\n\n this._config.rootElement.append(element);\n\n EventHandler.on(element, EVENT_MOUSEDOWN, () => {\n execute(this._config.clickCallback);\n });\n this._isAppended = true;\n }\n\n _emulateAnimation(callback) {\n executeAfterTransition(callback, this._getElement(), this._config.isAnimated);\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/focustrap.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$8 = 'focustrap';\nconst DATA_KEY$5 = 'bs.focustrap';\nconst EVENT_KEY$5 = `.${DATA_KEY$5}`;\nconst EVENT_FOCUSIN$2 = `focusin${EVENT_KEY$5}`;\nconst EVENT_KEYDOWN_TAB = `keydown.tab${EVENT_KEY$5}`;\nconst TAB_KEY = 'Tab';\nconst TAB_NAV_FORWARD = 'forward';\nconst TAB_NAV_BACKWARD = 'backward';\nconst Default$7 = {\n autofocus: true,\n trapElement: null // The element to trap focus inside of\n\n};\nconst DefaultType$7 = {\n autofocus: 'boolean',\n trapElement: 'element'\n};\n/**\n * Class definition\n */\n\nclass FocusTrap extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isActive = false;\n this._lastTabNavDirection = null;\n } // Getters\n\n\n static get Default() {\n return Default$7;\n }\n\n static get DefaultType() {\n return DefaultType$7;\n }\n\n static get NAME() {\n return NAME$8;\n } // Public\n\n\n activate() {\n if (this._isActive) {\n return;\n }\n\n if (this._config.autofocus) {\n this._config.trapElement.focus();\n }\n\n EventHandler.off(document, EVENT_KEY$5); // guard against infinite focus loop\n\n EventHandler.on(document, EVENT_FOCUSIN$2, event => this._handleFocusin(event));\n EventHandler.on(document, EVENT_KEYDOWN_TAB, event => this._handleKeydown(event));\n this._isActive = true;\n }\n\n deactivate() {\n if (!this._isActive) {\n return;\n }\n\n this._isActive = false;\n EventHandler.off(document, EVENT_KEY$5);\n } // Private\n\n\n _handleFocusin(event) {\n const {\n trapElement\n } = this._config;\n\n if (event.target === document || event.target === trapElement || trapElement.contains(event.target)) {\n return;\n }\n\n const elements = SelectorEngine.focusableChildren(trapElement);\n\n if (elements.length === 0) {\n trapElement.focus();\n } else if (this._lastTabNavDirection === TAB_NAV_BACKWARD) {\n elements[elements.length - 1].focus();\n } else {\n elements[0].focus();\n }\n }\n\n _handleKeydown(event) {\n if (event.key !== TAB_KEY) {\n return;\n }\n\n this._lastTabNavDirection = event.shiftKey ? 
TAB_NAV_BACKWARD : TAB_NAV_FORWARD;\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): modal.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$7 = 'modal';\nconst DATA_KEY$4 = 'bs.modal';\nconst EVENT_KEY$4 = `.${DATA_KEY$4}`;\nconst DATA_API_KEY$2 = '.data-api';\nconst ESCAPE_KEY$1 = 'Escape';\nconst EVENT_HIDE$4 = `hide${EVENT_KEY$4}`;\nconst EVENT_HIDE_PREVENTED$1 = `hidePrevented${EVENT_KEY$4}`;\nconst EVENT_HIDDEN$4 = `hidden${EVENT_KEY$4}`;\nconst EVENT_SHOW$4 = `show${EVENT_KEY$4}`;\nconst EVENT_SHOWN$4 = `shown${EVENT_KEY$4}`;\nconst EVENT_RESIZE$1 = `resize${EVENT_KEY$4}`;\nconst EVENT_CLICK_DISMISS = `click.dismiss${EVENT_KEY$4}`;\nconst EVENT_MOUSEDOWN_DISMISS = `mousedown.dismiss${EVENT_KEY$4}`;\nconst EVENT_KEYDOWN_DISMISS$1 = `keydown.dismiss${EVENT_KEY$4}`;\nconst EVENT_CLICK_DATA_API$2 = `click${EVENT_KEY$4}${DATA_API_KEY$2}`;\nconst CLASS_NAME_OPEN = 'modal-open';\nconst CLASS_NAME_FADE$3 = 'fade';\nconst CLASS_NAME_SHOW$4 = 'show';\nconst CLASS_NAME_STATIC = 'modal-static';\nconst OPEN_SELECTOR$1 = '.modal.show';\nconst SELECTOR_DIALOG = '.modal-dialog';\nconst SELECTOR_MODAL_BODY = '.modal-body';\nconst SELECTOR_DATA_TOGGLE$2 = '[data-bs-toggle=\"modal\"]';\nconst Default$6 = {\n backdrop: true,\n focus: true,\n keyboard: true\n};\nconst DefaultType$6 = {\n backdrop: '(boolean|string)',\n focus: 'boolean',\n keyboard: 'boolean'\n};\n/**\n * Class definition\n */\n\nclass Modal extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._dialog = SelectorEngine.findOne(SELECTOR_DIALOG, this._element);\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._isShown = false;\n this._isTransitioning = false;\n this._scrollBar = new ScrollBarHelper();\n\n this._addEventListeners();\n } // Getters\n\n\n static get Default() {\n return Default$6;\n }\n\n static get DefaultType() {\n return DefaultType$6;\n }\n\n static get NAME() {\n return NAME$7;\n } // Public\n\n\n toggle(relatedTarget) {\n return this._isShown ? 
this.hide() : this.show(relatedTarget);\n }\n\n show(relatedTarget) {\n if (this._isShown || this._isTransitioning) {\n return;\n }\n\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$4, {\n relatedTarget\n });\n\n if (showEvent.defaultPrevented) {\n return;\n }\n\n this._isShown = true;\n this._isTransitioning = true;\n\n this._scrollBar.hide();\n\n document.body.classList.add(CLASS_NAME_OPEN);\n\n this._adjustDialog();\n\n this._backdrop.show(() => this._showElement(relatedTarget));\n }\n\n hide() {\n if (!this._isShown || this._isTransitioning) {\n return;\n }\n\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$4);\n\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n this._isShown = false;\n this._isTransitioning = true;\n\n this._focustrap.deactivate();\n\n this._element.classList.remove(CLASS_NAME_SHOW$4);\n\n this._queueCallback(() => this._hideModal(), this._element, this._isAnimated());\n }\n\n dispose() {\n for (const htmlElement of [window, this._dialog]) {\n EventHandler.off(htmlElement, EVENT_KEY$4);\n }\n\n this._backdrop.dispose();\n\n this._focustrap.deactivate();\n\n super.dispose();\n }\n\n handleUpdate() {\n this._adjustDialog();\n } // Private\n\n\n _initializeBackDrop() {\n return new Backdrop({\n isVisible: Boolean(this._config.backdrop),\n // 'static' option will be translated to true, and booleans will keep their value,\n isAnimated: this._isAnimated()\n });\n }\n\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n\n _showElement(relatedTarget) {\n // try to append dynamic modal\n if (!document.body.contains(this._element)) {\n document.body.append(this._element);\n }\n\n this._element.style.display = 'block';\n\n this._element.removeAttribute('aria-hidden');\n\n this._element.setAttribute('aria-modal', true);\n\n this._element.setAttribute('role', 'dialog');\n\n this._element.scrollTop = 0;\n const modalBody = SelectorEngine.findOne(SELECTOR_MODAL_BODY, this._dialog);\n\n if (modalBody) {\n modalBody.scrollTop = 0;\n }\n\n reflow(this._element);\n\n this._element.classList.add(CLASS_NAME_SHOW$4);\n\n const transitionComplete = () => {\n if (this._config.focus) {\n this._focustrap.activate();\n }\n\n this._isTransitioning = false;\n EventHandler.trigger(this._element, EVENT_SHOWN$4, {\n relatedTarget\n });\n };\n\n this._queueCallback(transitionComplete, this._dialog, this._isAnimated());\n }\n\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS$1, event => {\n if (event.key !== ESCAPE_KEY$1) {\n return;\n }\n\n if (this._config.keyboard) {\n event.preventDefault();\n this.hide();\n return;\n }\n\n this._triggerBackdropTransition();\n });\n EventHandler.on(window, EVENT_RESIZE$1, () => {\n if (this._isShown && !this._isTransitioning) {\n this._adjustDialog();\n }\n });\n EventHandler.on(this._element, EVENT_MOUSEDOWN_DISMISS, event => {\n // a bad trick to segregate clicks that may start inside dialog but end outside, and avoid listen to scrollbar clicks\n EventHandler.one(this._element, EVENT_CLICK_DISMISS, event2 => {\n if (this._element !== event.target || this._element !== event2.target) {\n return;\n }\n\n if (this._config.backdrop === 'static') {\n this._triggerBackdropTransition();\n\n return;\n }\n\n if (this._config.backdrop) {\n this.hide();\n }\n });\n });\n }\n\n _hideModal() {\n this._element.style.display = 'none';\n\n this._element.setAttribute('aria-hidden', true);\n\n this._element.removeAttribute('aria-modal');\n\n 
this._element.removeAttribute('role');\n\n this._isTransitioning = false;\n\n this._backdrop.hide(() => {\n document.body.classList.remove(CLASS_NAME_OPEN);\n\n this._resetAdjustments();\n\n this._scrollBar.reset();\n\n EventHandler.trigger(this._element, EVENT_HIDDEN$4);\n });\n }\n\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_FADE$3);\n }\n\n _triggerBackdropTransition() {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED$1);\n\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const initialOverflowY = this._element.style.overflowY; // return if the following background transition hasn't yet completed\n\n if (initialOverflowY === 'hidden' || this._element.classList.contains(CLASS_NAME_STATIC)) {\n return;\n }\n\n if (!isModalOverflowing) {\n this._element.style.overflowY = 'hidden';\n }\n\n this._element.classList.add(CLASS_NAME_STATIC);\n\n this._queueCallback(() => {\n this._element.classList.remove(CLASS_NAME_STATIC);\n\n this._queueCallback(() => {\n this._element.style.overflowY = initialOverflowY;\n }, this._dialog);\n }, this._dialog);\n\n this._element.focus();\n }\n /**\n * The following methods are used to handle overflowing modals\n */\n\n\n _adjustDialog() {\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n\n const scrollbarWidth = this._scrollBar.getWidth();\n\n const isBodyOverflowing = scrollbarWidth > 0;\n\n if (isBodyOverflowing && !isModalOverflowing) {\n const property = isRTL() ? 'paddingLeft' : 'paddingRight';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n\n if (!isBodyOverflowing && isModalOverflowing) {\n const property = isRTL() ? 
'paddingRight' : 'paddingLeft';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n }\n\n _resetAdjustments() {\n this._element.style.paddingLeft = '';\n this._element.style.paddingRight = '';\n } // Static\n\n\n static jQueryInterface(config, relatedTarget) {\n return this.each(function () {\n const data = Modal.getOrCreateInstance(this, config);\n\n if (typeof config !== 'string') {\n return;\n }\n\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config](relatedTarget);\n });\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$2, SELECTOR_DATA_TOGGLE$2, function (event) {\n const target = getElementFromSelector(this);\n\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n\n EventHandler.one(target, EVENT_SHOW$4, showEvent => {\n if (showEvent.defaultPrevented) {\n // only register focus restorer if modal will actually get shown\n return;\n }\n\n EventHandler.one(target, EVENT_HIDDEN$4, () => {\n if (isVisible(this)) {\n this.focus();\n }\n });\n }); // avoid conflict when clicking modal toggler while another one is open\n\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR$1);\n\n if (alreadyOpen) {\n Modal.getInstance(alreadyOpen).hide();\n }\n\n const data = Modal.getOrCreateInstance(target);\n data.toggle(this);\n});\nenableDismissTrigger(Modal);\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Modal);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): offcanvas.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$6 = 'offcanvas';\nconst DATA_KEY$3 = 'bs.offcanvas';\nconst EVENT_KEY$3 = `.${DATA_KEY$3}`;\nconst DATA_API_KEY$1 = '.data-api';\nconst EVENT_LOAD_DATA_API$2 = `load${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst ESCAPE_KEY = 'Escape';\nconst CLASS_NAME_SHOW$3 = 'show';\nconst CLASS_NAME_SHOWING$1 = 'showing';\nconst CLASS_NAME_HIDING = 'hiding';\nconst CLASS_NAME_BACKDROP = 'offcanvas-backdrop';\nconst OPEN_SELECTOR = '.offcanvas.show';\nconst EVENT_SHOW$3 = `show${EVENT_KEY$3}`;\nconst EVENT_SHOWN$3 = `shown${EVENT_KEY$3}`;\nconst EVENT_HIDE$3 = `hide${EVENT_KEY$3}`;\nconst EVENT_HIDE_PREVENTED = `hidePrevented${EVENT_KEY$3}`;\nconst EVENT_HIDDEN$3 = `hidden${EVENT_KEY$3}`;\nconst EVENT_RESIZE = `resize${EVENT_KEY$3}`;\nconst EVENT_CLICK_DATA_API$1 = `click${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst EVENT_KEYDOWN_DISMISS = `keydown.dismiss${EVENT_KEY$3}`;\nconst SELECTOR_DATA_TOGGLE$1 = '[data-bs-toggle=\"offcanvas\"]';\nconst Default$5 = {\n backdrop: true,\n keyboard: true,\n scroll: false\n};\nconst DefaultType$5 = {\n backdrop: '(boolean|string)',\n keyboard: 'boolean',\n scroll: 'boolean'\n};\n/**\n * Class definition\n */\n\nclass Offcanvas extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isShown = false;\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n\n this._addEventListeners();\n } // Getters\n\n\n static get Default() {\n return Default$5;\n }\n\n static get DefaultType() {\n return DefaultType$5;\n }\n\n static get NAME() {\n return NAME$6;\n } // Public\n\n\n toggle(relatedTarget) {\n return this._isShown ? 
this.hide() : this.show(relatedTarget);\n }\n\n show(relatedTarget) {\n if (this._isShown) {\n return;\n }\n\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$3, {\n relatedTarget\n });\n\n if (showEvent.defaultPrevented) {\n return;\n }\n\n this._isShown = true;\n\n this._backdrop.show();\n\n if (!this._config.scroll) {\n new ScrollBarHelper().hide();\n }\n\n this._element.setAttribute('aria-modal', true);\n\n this._element.setAttribute('role', 'dialog');\n\n this._element.classList.add(CLASS_NAME_SHOWING$1);\n\n const completeCallBack = () => {\n if (!this._config.scroll || this._config.backdrop) {\n this._focustrap.activate();\n }\n\n this._element.classList.add(CLASS_NAME_SHOW$3);\n\n this._element.classList.remove(CLASS_NAME_SHOWING$1);\n\n EventHandler.trigger(this._element, EVENT_SHOWN$3, {\n relatedTarget\n });\n };\n\n this._queueCallback(completeCallBack, this._element, true);\n }\n\n hide() {\n if (!this._isShown) {\n return;\n }\n\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$3);\n\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n this._focustrap.deactivate();\n\n this._element.blur();\n\n this._isShown = false;\n\n this._element.classList.add(CLASS_NAME_HIDING);\n\n this._backdrop.hide();\n\n const completeCallback = () => {\n this._element.classList.remove(CLASS_NAME_SHOW$3, CLASS_NAME_HIDING);\n\n this._element.removeAttribute('aria-modal');\n\n this._element.removeAttribute('role');\n\n if (!this._config.scroll) {\n new ScrollBarHelper().reset();\n }\n\n EventHandler.trigger(this._element, EVENT_HIDDEN$3);\n };\n\n this._queueCallback(completeCallback, this._element, true);\n }\n\n dispose() {\n this._backdrop.dispose();\n\n this._focustrap.deactivate();\n\n super.dispose();\n } // Private\n\n\n _initializeBackDrop() {\n const clickCallback = () => {\n if (this._config.backdrop === 'static') {\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n return;\n }\n\n this.hide();\n }; // 'static' option will be translated to true, and booleans will keep their value\n\n\n const isVisible = Boolean(this._config.backdrop);\n return new Backdrop({\n className: CLASS_NAME_BACKDROP,\n isVisible,\n isAnimated: true,\n rootElement: this._element.parentNode,\n clickCallback: isVisible ? 
clickCallback : null\n });\n }\n\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS, event => {\n if (event.key !== ESCAPE_KEY) {\n return;\n }\n\n if (!this._config.keyboard) {\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n return;\n }\n\n this.hide();\n });\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Offcanvas.getOrCreateInstance(this, config);\n\n if (typeof config !== 'string') {\n return;\n }\n\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config](this);\n });\n }\n\n}\n/**\n * Data API implementation\n */\n\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$1, SELECTOR_DATA_TOGGLE$1, function (event) {\n const target = getElementFromSelector(this);\n\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n\n if (isDisabled(this)) {\n return;\n }\n\n EventHandler.one(target, EVENT_HIDDEN$3, () => {\n // focus on trigger when it is closed\n if (isVisible(this)) {\n this.focus();\n }\n }); // avoid conflict when clicking a toggler of an offcanvas, while another is open\n\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR);\n\n if (alreadyOpen && alreadyOpen !== target) {\n Offcanvas.getInstance(alreadyOpen).hide();\n }\n\n const data = Offcanvas.getOrCreateInstance(target);\n data.toggle(this);\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$2, () => {\n for (const selector of SelectorEngine.find(OPEN_SELECTOR)) {\n Offcanvas.getOrCreateInstance(selector).show();\n }\n});\nEventHandler.on(window, EVENT_RESIZE, () => {\n for (const element of SelectorEngine.find('[aria-modal][class*=show][class*=offcanvas-]')) {\n if (getComputedStyle(element).position !== 'fixed') {\n Offcanvas.getOrCreateInstance(element).hide();\n }\n }\n});\nenableDismissTrigger(Offcanvas);\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Offcanvas);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/sanitizer.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\nconst uriAttributes = new Set(['background', 'cite', 'href', 'itemtype', 'longdesc', 'poster', 'src', 'xlink:href']);\nconst ARIA_ATTRIBUTE_PATTERN = /^aria-[\\w-]*$/i;\n/**\n * A pattern that recognizes a commonly useful subset of URLs that are safe.\n *\n * Shout-out to Angular https://github.com/angular/angular/blob/12.2.x/packages/core/src/sanitization/url_sanitizer.ts\n */\n\nconst SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file|sms):|[^#&/:?]*(?:[#/?]|$))/i;\n/**\n * A pattern that matches safe data URLs. 
Only matches image, video and audio types.\n *\n * Shout-out to Angular https://github.com/angular/angular/blob/12.2.x/packages/core/src/sanitization/url_sanitizer.ts\n */\n\nconst DATA_URL_PATTERN = /^data:(?:image\\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\\/(?:mpeg|mp4|ogg|webm)|audio\\/(?:mp3|oga|ogg|opus));base64,[\\d+/a-z]+=*$/i;\n\nconst allowedAttribute = (attribute, allowedAttributeList) => {\n const attributeName = attribute.nodeName.toLowerCase();\n\n if (allowedAttributeList.includes(attributeName)) {\n if (uriAttributes.has(attributeName)) {\n return Boolean(SAFE_URL_PATTERN.test(attribute.nodeValue) || DATA_URL_PATTERN.test(attribute.nodeValue));\n }\n\n return true;\n } // Check if a regular expression validates the attribute.\n\n\n return allowedAttributeList.filter(attributeRegex => attributeRegex instanceof RegExp).some(regex => regex.test(attributeName));\n};\n\nconst DefaultAllowlist = {\n // Global attributes allowed on any supplied element below.\n '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],\n a: ['target', 'href', 'title', 'rel'],\n area: [],\n b: [],\n br: [],\n col: [],\n code: [],\n div: [],\n em: [],\n hr: [],\n h1: [],\n h2: [],\n h3: [],\n h4: [],\n h5: [],\n h6: [],\n i: [],\n img: ['src', 'srcset', 'alt', 'title', 'width', 'height'],\n li: [],\n ol: [],\n p: [],\n pre: [],\n s: [],\n small: [],\n span: [],\n sub: [],\n sup: [],\n strong: [],\n u: [],\n ul: []\n};\nfunction sanitizeHtml(unsafeHtml, allowList, sanitizeFunction) {\n if (!unsafeHtml.length) {\n return unsafeHtml;\n }\n\n if (sanitizeFunction && typeof sanitizeFunction === 'function') {\n return sanitizeFunction(unsafeHtml);\n }\n\n const domParser = new window.DOMParser();\n const createdDocument = domParser.parseFromString(unsafeHtml, 'text/html');\n const elements = [].concat(...createdDocument.body.querySelectorAll('*'));\n\n for (const element of elements) {\n const elementName = element.nodeName.toLowerCase();\n\n if (!Object.keys(allowList).includes(elementName)) {\n element.remove();\n continue;\n }\n\n const attributeList = [].concat(...element.attributes);\n const allowedAttributes = [].concat(allowList['*'] || [], allowList[elementName] || []);\n\n for (const attribute of attributeList) {\n if (!allowedAttribute(attribute, allowedAttributes)) {\n element.removeAttribute(attribute.nodeName);\n }\n }\n }\n\n return createdDocument.body.innerHTML;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): util/template-factory.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$5 = 'TemplateFactory';\nconst Default$4 = {\n allowList: DefaultAllowlist,\n content: {},\n // { selector : text , selector2 : text2 , }\n extraClass: '',\n html: false,\n sanitize: true,\n sanitizeFn: null,\n template: '
'\n};\nconst DefaultType$4 = {\n allowList: 'object',\n content: 'object',\n extraClass: '(string|function)',\n html: 'boolean',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n template: 'string'\n};\nconst DefaultContentType = {\n entry: '(string|element|function|null)',\n selector: '(string|element)'\n};\n/**\n * Class definition\n */\n\nclass TemplateFactory extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n } // Getters\n\n\n static get Default() {\n return Default$4;\n }\n\n static get DefaultType() {\n return DefaultType$4;\n }\n\n static get NAME() {\n return NAME$5;\n } // Public\n\n\n getContent() {\n return Object.values(this._config.content).map(config => this._resolvePossibleFunction(config)).filter(Boolean);\n }\n\n hasContent() {\n return this.getContent().length > 0;\n }\n\n changeContent(content) {\n this._checkContent(content);\n\n this._config.content = { ...this._config.content,\n ...content\n };\n return this;\n }\n\n toHtml() {\n const templateWrapper = document.createElement('div');\n templateWrapper.innerHTML = this._maybeSanitize(this._config.template);\n\n for (const [selector, text] of Object.entries(this._config.content)) {\n this._setContent(templateWrapper, text, selector);\n }\n\n const template = templateWrapper.children[0];\n\n const extraClass = this._resolvePossibleFunction(this._config.extraClass);\n\n if (extraClass) {\n template.classList.add(...extraClass.split(' '));\n }\n\n return template;\n } // Private\n\n\n _typeCheckConfig(config) {\n super._typeCheckConfig(config);\n\n this._checkContent(config.content);\n }\n\n _checkContent(arg) {\n for (const [selector, content] of Object.entries(arg)) {\n super._typeCheckConfig({\n selector,\n entry: content\n }, DefaultContentType);\n }\n }\n\n _setContent(template, content, selector) {\n const templateElement = SelectorEngine.findOne(selector, template);\n\n if (!templateElement) {\n return;\n }\n\n content = this._resolvePossibleFunction(content);\n\n if (!content) {\n templateElement.remove();\n return;\n }\n\n if (isElement(content)) {\n this._putElementInTemplate(getElement(content), templateElement);\n\n return;\n }\n\n if (this._config.html) {\n templateElement.innerHTML = this._maybeSanitize(content);\n return;\n }\n\n templateElement.textContent = content;\n }\n\n _maybeSanitize(arg) {\n return this._config.sanitize ? sanitizeHtml(arg, this._config.allowList, this._config.sanitizeFn) : arg;\n }\n\n _resolvePossibleFunction(arg) {\n return typeof arg === 'function' ? 
arg(this) : arg;\n }\n\n _putElementInTemplate(element, templateElement) {\n if (this._config.html) {\n templateElement.innerHTML = '';\n templateElement.append(element);\n return;\n }\n\n templateElement.textContent = element.textContent;\n }\n\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): tooltip.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$4 = 'tooltip';\nconst DISALLOWED_ATTRIBUTES = new Set(['sanitize', 'allowList', 'sanitizeFn']);\nconst CLASS_NAME_FADE$2 = 'fade';\nconst CLASS_NAME_MODAL = 'modal';\nconst CLASS_NAME_SHOW$2 = 'show';\nconst SELECTOR_TOOLTIP_INNER = '.tooltip-inner';\nconst SELECTOR_MODAL = `.${CLASS_NAME_MODAL}`;\nconst EVENT_MODAL_HIDE = 'hide.bs.modal';\nconst TRIGGER_HOVER = 'hover';\nconst TRIGGER_FOCUS = 'focus';\nconst TRIGGER_CLICK = 'click';\nconst TRIGGER_MANUAL = 'manual';\nconst EVENT_HIDE$2 = 'hide';\nconst EVENT_HIDDEN$2 = 'hidden';\nconst EVENT_SHOW$2 = 'show';\nconst EVENT_SHOWN$2 = 'shown';\nconst EVENT_INSERTED = 'inserted';\nconst EVENT_CLICK$1 = 'click';\nconst EVENT_FOCUSIN$1 = 'focusin';\nconst EVENT_FOCUSOUT$1 = 'focusout';\nconst EVENT_MOUSEENTER = 'mouseenter';\nconst EVENT_MOUSELEAVE = 'mouseleave';\nconst AttachmentMap = {\n AUTO: 'auto',\n TOP: 'top',\n RIGHT: isRTL() ? 'left' : 'right',\n BOTTOM: 'bottom',\n LEFT: isRTL() ? 'right' : 'left'\n};\nconst Default$3 = {\n allowList: DefaultAllowlist,\n animation: true,\n boundary: 'clippingParents',\n container: false,\n customClass: '',\n delay: 0,\n fallbackPlacements: ['top', 'right', 'bottom', 'left'],\n html: false,\n offset: [0, 0],\n placement: 'top',\n popperConfig: null,\n sanitize: true,\n sanitizeFn: null,\n selector: false,\n template: '
<div class="tooltip" role="tooltip">' + '<div class="tooltip-arrow"></div>' + '<div class="tooltip-inner"></div>' + '</div>
',\n title: '',\n trigger: 'hover focus'\n};\nconst DefaultType$3 = {\n allowList: 'object',\n animation: 'boolean',\n boundary: '(string|element)',\n container: '(string|element|boolean)',\n customClass: '(string|function)',\n delay: '(number|object)',\n fallbackPlacements: 'array',\n html: 'boolean',\n offset: '(array|string|function)',\n placement: '(string|function)',\n popperConfig: '(null|object|function)',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n selector: '(string|boolean)',\n template: 'string',\n title: '(string|element|function)',\n trigger: 'string'\n};\n/**\n * Class definition\n */\n\nclass Tooltip extends BaseComponent {\n constructor(element, config) {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s tooltips require Popper (https://popper.js.org)');\n }\n\n super(element, config); // Private\n\n this._isEnabled = true;\n this._timeout = 0;\n this._isHovered = null;\n this._activeTrigger = {};\n this._popper = null;\n this._templateFactory = null;\n this._newContent = null; // Protected\n\n this.tip = null;\n\n this._setListeners();\n\n if (!this._config.selector) {\n this._fixTitle();\n }\n } // Getters\n\n\n static get Default() {\n return Default$3;\n }\n\n static get DefaultType() {\n return DefaultType$3;\n }\n\n static get NAME() {\n return NAME$4;\n } // Public\n\n\n enable() {\n this._isEnabled = true;\n }\n\n disable() {\n this._isEnabled = false;\n }\n\n toggleEnabled() {\n this._isEnabled = !this._isEnabled;\n }\n\n toggle() {\n if (!this._isEnabled) {\n return;\n }\n\n this._activeTrigger.click = !this._activeTrigger.click;\n\n if (this._isShown()) {\n this._leave();\n\n return;\n }\n\n this._enter();\n }\n\n dispose() {\n clearTimeout(this._timeout);\n EventHandler.off(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n\n if (this._element.getAttribute('data-bs-original-title')) {\n this._element.setAttribute('title', this._element.getAttribute('data-bs-original-title'));\n }\n\n this._disposePopper();\n\n super.dispose();\n }\n\n show() {\n if (this._element.style.display === 'none') {\n throw new Error('Please use show on visible elements');\n }\n\n if (!(this._isWithContent() && this._isEnabled)) {\n return;\n }\n\n const showEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_SHOW$2));\n const shadowRoot = findShadowRoot(this._element);\n\n const isInTheDom = (shadowRoot || this._element.ownerDocument.documentElement).contains(this._element);\n\n if (showEvent.defaultPrevented || !isInTheDom) {\n return;\n } // todo v6 remove this OR make it optional\n\n\n this._disposePopper();\n\n const tip = this._getTipElement();\n\n this._element.setAttribute('aria-describedby', tip.getAttribute('id'));\n\n const {\n container\n } = this._config;\n\n if (!this._element.ownerDocument.documentElement.contains(this.tip)) {\n container.append(tip);\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_INSERTED));\n }\n\n this._popper = this._createPopper(tip);\n tip.classList.add(CLASS_NAME_SHOW$2); // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n\n const complete = () => {\n 
EventHandler.trigger(this._element, this.constructor.eventName(EVENT_SHOWN$2));\n\n if (this._isHovered === false) {\n this._leave();\n }\n\n this._isHovered = false;\n };\n\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n\n hide() {\n if (!this._isShown()) {\n return;\n }\n\n const hideEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDE$2));\n\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n const tip = this._getTipElement();\n\n tip.classList.remove(CLASS_NAME_SHOW$2); // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n\n this._activeTrigger[TRIGGER_CLICK] = false;\n this._activeTrigger[TRIGGER_FOCUS] = false;\n this._activeTrigger[TRIGGER_HOVER] = false;\n this._isHovered = null; // it is a trick to support manual triggering\n\n const complete = () => {\n if (this._isWithActiveTrigger()) {\n return;\n }\n\n if (!this._isHovered) {\n this._disposePopper();\n }\n\n this._element.removeAttribute('aria-describedby');\n\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDDEN$2));\n };\n\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n\n update() {\n if (this._popper) {\n this._popper.update();\n }\n } // Protected\n\n\n _isWithContent() {\n return Boolean(this._getTitle());\n }\n\n _getTipElement() {\n if (!this.tip) {\n this.tip = this._createTipElement(this._newContent || this._getContentForTemplate());\n }\n\n return this.tip;\n }\n\n _createTipElement(content) {\n const tip = this._getTemplateFactory(content).toHtml(); // todo: remove this check on v6\n\n\n if (!tip) {\n return null;\n }\n\n tip.classList.remove(CLASS_NAME_FADE$2, CLASS_NAME_SHOW$2); // todo: on v6 the following can be achieved with CSS only\n\n tip.classList.add(`bs-${this.constructor.NAME}-auto`);\n const tipId = getUID(this.constructor.NAME).toString();\n tip.setAttribute('id', tipId);\n\n if (this._isAnimated()) {\n tip.classList.add(CLASS_NAME_FADE$2);\n }\n\n return tip;\n }\n\n setContent(content) {\n this._newContent = content;\n\n if (this._isShown()) {\n this._disposePopper();\n\n this.show();\n }\n }\n\n _getTemplateFactory(content) {\n if (this._templateFactory) {\n this._templateFactory.changeContent(content);\n } else {\n this._templateFactory = new TemplateFactory({ ...this._config,\n // the `content` var has to be after `this._config`\n // to override config.content in case of popover\n content,\n extraClass: this._resolvePossibleFunction(this._config.customClass)\n });\n }\n\n return this._templateFactory;\n }\n\n _getContentForTemplate() {\n return {\n [SELECTOR_TOOLTIP_INNER]: this._getTitle()\n };\n }\n\n _getTitle() {\n return this._resolvePossibleFunction(this._config.title) || this._element.getAttribute('data-bs-original-title');\n } // Private\n\n\n _initializeOnDelegatedTarget(event) {\n return this.constructor.getOrCreateInstance(event.delegateTarget, this._getDelegateConfig());\n }\n\n _isAnimated() {\n return this._config.animation || this.tip && this.tip.classList.contains(CLASS_NAME_FADE$2);\n }\n\n _isShown() {\n return this.tip && this.tip.classList.contains(CLASS_NAME_SHOW$2);\n }\n\n _createPopper(tip) {\n const placement = typeof this._config.placement === 'function' ? 
this._config.placement.call(this, tip, this._element) : this._config.placement;\n const attachment = AttachmentMap[placement.toUpperCase()];\n return Popper.createPopper(this._element, tip, this._getPopperConfig(attachment));\n }\n\n _getOffset() {\n const {\n offset\n } = this._config;\n\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n\n return offset;\n }\n\n _resolvePossibleFunction(arg) {\n return typeof arg === 'function' ? arg.call(this._element) : arg;\n }\n\n _getPopperConfig(attachment) {\n const defaultBsPopperConfig = {\n placement: attachment,\n modifiers: [{\n name: 'flip',\n options: {\n fallbackPlacements: this._config.fallbackPlacements\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }, {\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'arrow',\n options: {\n element: `.${this.constructor.NAME}-arrow`\n }\n }, {\n name: 'preSetPlacement',\n enabled: true,\n phase: 'beforeMain',\n fn: data => {\n // Pre-set Popper's placement attribute in order to read the arrow sizes properly.\n // Otherwise, Popper mixes up the width and height dimensions since the initial arrow style is for top placement\n this._getTipElement().setAttribute('data-popper-placement', data.state.placement);\n }\n }]\n };\n return { ...defaultBsPopperConfig,\n ...(typeof this._config.popperConfig === 'function' ? this._config.popperConfig(defaultBsPopperConfig) : this._config.popperConfig)\n };\n }\n\n _setListeners() {\n const triggers = this._config.trigger.split(' ');\n\n for (const trigger of triggers) {\n if (trigger === 'click') {\n EventHandler.on(this._element, this.constructor.eventName(EVENT_CLICK$1), this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n\n context.toggle();\n });\n } else if (trigger !== TRIGGER_MANUAL) {\n const eventIn = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSEENTER) : this.constructor.eventName(EVENT_FOCUSIN$1);\n const eventOut = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSELEAVE) : this.constructor.eventName(EVENT_FOCUSOUT$1);\n EventHandler.on(this._element, eventIn, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n\n context._activeTrigger[event.type === 'focusin' ? TRIGGER_FOCUS : TRIGGER_HOVER] = true;\n\n context._enter();\n });\n EventHandler.on(this._element, eventOut, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n\n context._activeTrigger[event.type === 'focusout' ? TRIGGER_FOCUS : TRIGGER_HOVER] = context._element.contains(event.relatedTarget);\n\n context._leave();\n });\n }\n }\n\n this._hideModalHandler = () => {\n if (this._element) {\n this.hide();\n }\n };\n\n EventHandler.on(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n }\n\n _fixTitle() {\n const title = this._element.getAttribute('title');\n\n if (!title) {\n return;\n }\n\n if (!this._element.getAttribute('aria-label') && !this._element.textContent.trim()) {\n this._element.setAttribute('aria-label', title);\n }\n\n this._element.setAttribute('data-bs-original-title', title); // DO NOT USE IT. 
Is only for backwards compatibility\n\n\n this._element.removeAttribute('title');\n }\n\n _enter() {\n if (this._isShown() || this._isHovered) {\n this._isHovered = true;\n return;\n }\n\n this._isHovered = true;\n\n this._setTimeout(() => {\n if (this._isHovered) {\n this.show();\n }\n }, this._config.delay.show);\n }\n\n _leave() {\n if (this._isWithActiveTrigger()) {\n return;\n }\n\n this._isHovered = false;\n\n this._setTimeout(() => {\n if (!this._isHovered) {\n this.hide();\n }\n }, this._config.delay.hide);\n }\n\n _setTimeout(handler, timeout) {\n clearTimeout(this._timeout);\n this._timeout = setTimeout(handler, timeout);\n }\n\n _isWithActiveTrigger() {\n return Object.values(this._activeTrigger).includes(true);\n }\n\n _getConfig(config) {\n const dataAttributes = Manipulator.getDataAttributes(this._element);\n\n for (const dataAttribute of Object.keys(dataAttributes)) {\n if (DISALLOWED_ATTRIBUTES.has(dataAttribute)) {\n delete dataAttributes[dataAttribute];\n }\n }\n\n config = { ...dataAttributes,\n ...(typeof config === 'object' && config ? config : {})\n };\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n\n this._typeCheckConfig(config);\n\n return config;\n }\n\n _configAfterMerge(config) {\n config.container = config.container === false ? document.body : getElement(config.container);\n\n if (typeof config.delay === 'number') {\n config.delay = {\n show: config.delay,\n hide: config.delay\n };\n }\n\n if (typeof config.title === 'number') {\n config.title = config.title.toString();\n }\n\n if (typeof config.content === 'number') {\n config.content = config.content.toString();\n }\n\n return config;\n }\n\n _getDelegateConfig() {\n const config = {};\n\n for (const key in this._config) {\n if (this.constructor.Default[key] !== this._config[key]) {\n config[key] = this._config[key];\n }\n }\n\n config.selector = false;\n config.trigger = 'manual'; // In the future can be replaced with:\n // const keysWithDifferentValues = Object.entries(this._config).filter(entry => this.constructor.Default[entry[0]] !== this._config[entry[0]])\n // `Object.fromEntries(keysWithDifferentValues)`\n\n return config;\n }\n\n _disposePopper() {\n if (this._popper) {\n this._popper.destroy();\n\n this._popper = null;\n }\n\n if (this.tip) {\n this.tip.remove();\n this.tip = null;\n }\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Tooltip.getOrCreateInstance(this, config);\n\n if (typeof config !== 'string') {\n return;\n }\n\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config]();\n });\n }\n\n}\n/**\n * jQuery\n */\n\n\ndefineJQueryPlugin(Tooltip);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): popover.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$3 = 'popover';\nconst SELECTOR_TITLE = '.popover-header';\nconst SELECTOR_CONTENT = '.popover-body';\nconst Default$2 = { ...Tooltip.Default,\n content: '',\n offset: [0, 8],\n placement: 'right',\n template: '
<div class="popover" role="tooltip">' + '<div class="popover-arrow"></div>' + '<h3 class="popover-header"></h3>' + '<div class="popover-body"></div>' + '</div>
',\n trigger: 'click'\n};\nconst DefaultType$2 = { ...Tooltip.DefaultType,\n content: '(null|string|element|function)'\n};\n/**\n * Class definition\n */\n\nclass Popover extends Tooltip {\n // Getters\n static get Default() {\n return Default$2;\n }\n\n static get DefaultType() {\n return DefaultType$2;\n }\n\n static get NAME() {\n return NAME$3;\n } // Overrides\n\n\n _isWithContent() {\n return this._getTitle() || this._getContent();\n } // Private\n\n\n _getContentForTemplate() {\n return {\n [SELECTOR_TITLE]: this._getTitle(),\n [SELECTOR_CONTENT]: this._getContent()\n };\n }\n\n _getContent() {\n return this._resolvePossibleFunction(this._config.content);\n } // Static\n\n\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Popover.getOrCreateInstance(this, config);\n\n if (typeof config !== 'string') {\n return;\n }\n\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n\n data[config]();\n });\n }\n\n}\n/**\n * jQuery\n */\n\n\ndefineJQueryPlugin(Popover);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap (v5.2.3): scrollspy.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n/**\n * Constants\n */\n\nconst NAME$2 = 'scrollspy';\nconst DATA_KEY$2 = 'bs.scrollspy';\nconst EVENT_KEY$2 = `.${DATA_KEY$2}`;\nconst DATA_API_KEY = '.data-api';\nconst EVENT_ACTIVATE = `activate${EVENT_KEY$2}`;\nconst EVENT_CLICK = `click${EVENT_KEY$2}`;\nconst EVENT_LOAD_DATA_API$1 = `load${EVENT_KEY$2}${DATA_API_KEY}`;\nconst CLASS_NAME_DROPDOWN_ITEM = 'dropdown-item';\nconst CLASS_NAME_ACTIVE$1 = 'active';\nconst SELECTOR_DATA_SPY = '[data-bs-spy=\"scroll\"]';\nconst SELECTOR_TARGET_LINKS = '[href]';\nconst SELECTOR_NAV_LIST_GROUP = '.nav, .list-group';\nconst SELECTOR_NAV_LINKS = '.nav-link';\nconst SELECTOR_NAV_ITEMS = '.nav-item';\nconst SELECTOR_LIST_ITEMS = '.list-group-item';\nconst SELECTOR_LINK_ITEMS = `${SELECTOR_NAV_LINKS}, ${SELECTOR_NAV_ITEMS} > ${SELECTOR_NAV_LINKS}, ${SELECTOR_LIST_ITEMS}`;\nconst SELECTOR_DROPDOWN = '.dropdown';\nconst SELECTOR_DROPDOWN_TOGGLE$1 = '.dropdown-toggle';\nconst Default$1 = {\n offset: null,\n // TODO: v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: '0px 0px -25%',\n smoothScroll: false,\n target: null,\n threshold: [0.1, 0.5, 1]\n};\nconst DefaultType$1 = {\n offset: '(number|null)',\n // TODO v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: 'string',\n smoothScroll: 'boolean',\n target: 'element',\n threshold: 'array'\n};\n/**\n * Class definition\n */\n\nclass ScrollSpy extends BaseComponent {\n constructor(element, config) {\n super(element, config); // this._element is the observablesContainer and config.target the menu links wrapper\n\n this._targetLinks = new Map();\n this._observableSections = new Map();\n this._rootElement = getComputedStyle(this._element).overflowY === 'visible' ? 
null : this._element;\n this._activeTarget = null;\n this._observer = null;\n this._previousScrollData = {\n visibleEntryTop: 0,\n parentScrollTop: 0\n };\n this.refresh(); // initialize\n } // Getters\n\n\n static get Default() {\n return Default$1;\n }\n\n static get DefaultType() {\n return DefaultType$1;\n }\n\n static get NAME() {\n return NAME$2;\n } // Public\n\n\n refresh() {\n this._initializeTargetsAndObservables();\n\n this._maybeEnableSmoothScroll();\n\n if (this._observer) {\n this._observer.disconnect();\n } else {\n this._observer = this._getNewObserver();\n }\n\n for (const section of this._observableSections.values()) {\n this._observer.observe(section);\n }\n }\n\n dispose() {\n this._observer.disconnect();\n\n super.dispose();\n } // Private\n\n\n _configAfterMerge(config) {\n // TODO: on v6 target should be given explicitly & remove the {target: 'ss-target'} case\n config.target = getElement(config.target) || document.body; // TODO: v6 Only for backwards compatibility reasons. Use rootMargin only\n\n config.rootMargin = config.offset ? `${config.offset}px 0px -30%` : config.rootMargin;\n\n if (typeof config.threshold === 'string') {\n config.threshold = config.threshold.split(',').map(value => Number.parseFloat(value));\n }\n\n return config;\n }\n\n _maybeEnableSmoothScroll() {\n if (!this._config.smoothScroll) {\n return;\n } // unregister any previous listeners\n\n\n EventHandler.off(this._config.target, EVENT_CLICK);\n EventHandler.on(this._config.target, EVENT_CLICK, SELECTOR_TARGET_LINKS, event => {\n const observableSection = this._observableSections.get(event.target.hash);\n\n if (observableSection) {\n event.preventDefault();\n const root = this._rootElement || window;\n const height = observableSection.offsetTop - this._element.offsetTop;\n\n if (root.scrollTo) {\n root.scrollTo({\n top: height,\n behavior: 'smooth'\n });\n return;\n } // Chrome 60 doesn't support `scrollTo`\n\n\n root.scrollTop = height;\n }\n });\n }\n\n _getNewObserver() {\n const options = {\n root: this._rootElement,\n threshold: this._config.threshold,\n rootMargin: this._config.rootMargin\n };\n return new IntersectionObserver(entries => this._observerCallback(entries), options);\n } // The logic of selection\n\n\n _observerCallback(entries) {\n const targetElement = entry => this._targetLinks.get(`#${entry.target.id}`);\n\n const activate = entry => {\n this._previousScrollData.visibleEntryTop = entry.target.offsetTop;\n\n this._process(targetElement(entry));\n };\n\n const parentScrollTop = (this._rootElement || document.documentElement).scrollTop;\n const userScrollsDown = parentScrollTop >= this._previousScrollData.parentScrollTop;\n this._previousScrollData.parentScrollTop = parentScrollTop;\n\n for (const entry of entries) {\n if (!entry.isIntersecting) {\n this._activeTarget = null;\n\n this._clearActiveClass(targetElement(entry));\n\n continue;\n }\n\n const entryIsLowerThanPrevious = entry.target.offsetTop >= this._previousScrollData.visibleEntryTop; // if we are scrolling down, pick the bigger offsetTop\n\n if (userScrollsDown && entryIsLowerThanPrevious) {\n activate(entry); // if parent isn't scrolled, let's keep the first visible item, breaking the iteration\n\n if (!parentScrollTop) {\n return;\n }\n\n continue;\n } // if we are scrolling up, pick the smallest offsetTop\n\n\n if (!userScrollsDown && !entryIsLowerThanPrevious) {\n activate(entry);\n }\n }\n }\n\n _initializeTargetsAndObservables() {\n this._targetLinks = new Map();\n this._observableSections = new Map();\n 
const targetLinks = SelectorEngine.find(SELECTOR_TARGET_LINKS, this._config.target);\n\n for (const anchor of targetLinks) {\n // ensure that the anchor has an id and is not disabled\n if (!anchor.hash || isDisabled(anchor)) {\n continue;\n }\n\n const observableSection = SelectorEngine.findOne(anchor.hash, this._element); // ensure that the observableSection exists & is visible\n\n if (isVisible(observableSection)) {\n this._targetLinks.set(anchor.hash, anchor);\n\n this._observableSections.set(anchor.hash, observableSection);\n }\n }\n }\n\n _process(target) {\n if (this._activeTarget === target) {\n return;\n }\n\n this._clearActiveClass(this._config.target);\n\n this._activeTarget = target;\n target.classList.add(CLASS_NAME_ACTIVE$1);\n\n this._activateParents(target);\n\n EventHandler.trigger(this._element, EVENT_ACTIVATE, {\n relatedTarget: target\n });\n }\n\n _activateParents(target) {\n // Activate dropdown parents\n if (target.classList.contains(CLASS_NAME_DROPDOWN_ITEM)) {\n SelectorEngine.findOne(SELECTOR_DROPDOWN_TOGGLE$1, target.closest(SELECTOR_DROPDOWN)).classList.add(CLASS_NAME_ACTIVE$1);\n return;\n }\n\n for (const listGroup of SelectorEngine.parents(target, SELECTOR_NAV_LIST_GROUP)) {\n // Set triggered links parents as active\n // With both
<ul> and <nav>