diff --git a/.appveyor.yml b/.appveyor.yml new file mode 100644 index 0000000..338380e --- /dev/null +++ b/.appveyor.yml @@ -0,0 +1,29 @@ +image: Visual Studio 2022 + +environment: + global: + RANDOM_SEED: 0 + matrix: + - PYTHON_MAJOR: 3 + PYTHON_MINOR: 11 + +cache: + - .venv -> poetry.lock + +install: + # Add Python to the PATH + - set PATH=C:\Python%PYTHON_MAJOR%%PYTHON_MINOR%;%PATH% + - set PATH=C:\Python%PYTHON_MAJOR%%PYTHON_MINOR%\Scripts;%PATH% + # Install system dependencies + - choco install make + - curl -sSL https://install.python-poetry.org | python - + - set PATH=%USERPROFILE%\AppData\Roaming\Python\Scripts;%PATH% + - make doctor + # Install project dependencies + - make install + +build: off + +test_script: + - make check + - make test diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..45ea722 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] + +branch = true + +data_file = .cache/coverage + +omit = + .venv/* + */tests/* + */__main__.py + +[report] + +exclude_lines = + pragma: no cover + raise NotImplementedError + except DistributionNotFound + TYPE_CHECKING diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..0f68208 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +* text=auto +CHANGELOG.md merge=union +poetry.lock merge=binary diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..9e39822 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,45 @@ +name: main + +on: [push, pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.11'] + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - uses: Gr1N/setup-poetry@v8 + + - name: Check dependencies + run: make doctor + + - uses: actions/cache@v2 + with: + path: .venv + key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} + + - name: Install dependencies + run: make install + + - name: Check code + run: make check + + - name: Test code + run: make test + + - name: Upload coverage + uses: codecov/codecov-action@v4 + if: steps.fork-check.outputs.is-fork == 'false' + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d25972a --- /dev/null +++ b/.gitignore @@ -0,0 +1,50 @@ +# Temporary Python files +*.pyc +*.egg-info/ +__pycache__/ +.ipynb_checkpoints/ +setup.py +pip-wheel-metadata/ + +# Temporary OS files +Icon* + +# Temporary virtual environment files +/.cache/ +/.venv/ +tmp/ + +# Temporary server files +.env +*.pid + +# Generated documentation +/docs/gen/ +/docs/apidocs/ +/site/ +/*.html +/docs/*.png + +# Google Drive +*.gdoc +*.gsheet +*.gslides +*.gdraw + +# Testing and coverage results +/.coverage +/.coverage.* +/htmlcov/ +/prof/ +coverage.xml + +# Build and release directories +/build/ +/dist/ +*.spec + +# Sublime Text +*.sublime-workspace + +# Eclipse +.settings diff --git a/.pydocstyle.ini b/.pydocstyle.ini new file mode 100644 index 0000000..69e38cb --- /dev/null +++ b/.pydocstyle.ini @@ -0,0 +1,14 @@ +[pydocstyle] + +# D211: No blank lines allowed before class docstring +add_select = D211 + +# D100: Missing docstring in public module +# D101: Missing docstring in public class +# D102: Missing docstring in public method +# D103: Missing docstring in public function +# D104: Missing docstring in public package +# D105: Missing docstring 
in magic method +# D107: Missing docstring in __init__ +# D202: No blank lines allowed after function docstring +add_ignore = D100,D101,D102,D103,D104,D105,D107,D202 diff --git a/.pylint.ini b/.pylint.ini new file mode 100644 index 0000000..c6700f7 --- /dev/null +++ b/.pylint.ini @@ -0,0 +1,411 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. +jobs=0 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins=pylint_pytest + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable= + fixme, + global-statement, + invalid-name, + missing-docstring, + redefined-outer-name, + too-few-public-methods, + too-many-locals, + too-many-arguments, + unnecessary-pass, + broad-except, + duplicate-code, + too-many-branches, + too-many-return-statements, + too-many-public-methods, + too-many-ancestors, + too-many-instance-attributes, + too-many-statements, + attribute-defined-outside-init, + unsupported-assignment-operation, + unsupported-delete-operation, + too-many-nested-blocks, + protected-access, + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. 
This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. +score=no + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[BASIC] + +# Regular expression matching correct argument names +argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct attribute names +attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Regular expression matching correct function names +function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct method names +method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Regular expression matching correct variable names +variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^.*((https?:)|(pragma:)|(TODO:)).*$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=88 + +# Maximum number of lines in a module +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. 
+single-line-if-stmt=no + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. 
+allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/.scrutinizer.yml b/.scrutinizer.yml new file mode 100644 index 0000000..1b741d6 --- /dev/null +++ b/.scrutinizer.yml @@ -0,0 +1,12 @@ +build: + tests: + override: + - pylint-run --rcfile=.pylint.ini + - py-scrutinizer-run +checks: + python: + code_rating: true + duplicate_code: true +filter: + excluded_paths: + - "*/tests/*" diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000..fb12e21 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,2 @@ +python 3.11.5 +poetry 1.7.0 diff --git a/.verchew.ini b/.verchew.ini new file mode 100644 index 0000000..5ed9c9a --- /dev/null +++ b/.verchew.ini @@ -0,0 +1,22 @@ +[Make] + +cli = make +version = GNU Make + +[Python] + +cli = python +version = 3 + +[Poetry] + +cli = poetry +version = 1 + +[Graphviz] + +cli = dot +cli_version_arg = -V +version = 7 || 8 || 9 || 10 +optional = true +message = This is only needed to generate UML diagrams for documentation. diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..e06fa34 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,45 @@ +{ + "files.exclude": { + ".cache/": true, + ".venv/": true, + "*.egg-info": true, + "pip-wheel-metadata/": true, + "**/__pycache__": true, + "**/*.pyc": true, + "**/.ipynb_checkpoints": true, + "**/tmp/": true, + "dist/": true, + "htmlcov/": true, + "notebooks/*.yml": true, + "notebooks/files/": true, + "notebooks/inventory/": true, + "prof/": true, + "site/": true, + "geckodriver.log": true, + "targets.log": true, + "bin/verchew": true + }, + "editor.formatOnSave": true, + "pylint.args": ["--rcfile=.pylint.ini"], + "cSpell.words": [ + "asdf", + "builtins", + "codecov", + "codehilite", + "choco", + "cygstart", + "cygwin", + "dataclasses", + "Graphviz", + "ipython", + "mkdocs", + "noclasses", + "pipx", + "pyenv", + "ruamel", + "showfspath", + "USERPROFILE", + "venv", + "verchew" + ] + } diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..43cdb0d --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release Notes + +## 0.1 - 2024-08-27 + + - First public version diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..dbf0236 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,93 @@ +# Contributor Guide + +## Setup + +### Requirements + +* Make: + - macOS: `$ xcode-select --install` + - Linux: [https://www.gnu.org](https://www.gnu.org/software/make) + - Windows: `$ choco install make` [https://chocolatey.org](https://chocolatey.org/install) +* Python: `$ asdf install` (https://asdf-vm.com)[https://asdf-vm.com/guide/getting-started.html] +* Poetry: [https://python-poetry.org](https://python-poetry.org/docs/#installation) +* Graphviz: + * macOS: `$ brew install graphviz` + * Linux: [https://graphviz.org/download](https://graphviz.org/download/) + * Windows: [https://graphviz.org/download](https://graphviz.org/download/) + +To confirm these system dependencies are configured correctly: + +```text +$ make bootstrap +$ make doctor +``` + +### Installation + +Install project dependencies into a virtual environment: + +```text +$ make install +``` + +## Development Tasks + +### Manual + +Run the tests: + +```text +$ make test +``` + +Run static analysis: + +```text +$ make check +``` + +Build the documentation: + +```text +$ make docs +``` + +### Automatic + +Keep all of the above tasks running on change: + +```text +$ make dev +``` + +> In order to have OS X notifications, `brew install terminal-notifier`. 
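+
+The Make targets above wrap the underlying tools. If you need more direct control (for example, to run a single linter), the same commands can be invoked through Poetry. This is a rough sketch of what `make check` runs; see the `Makefile` for the exact invocations and options:
+
+```text
+$ poetry run isort filecloudapi tests
+$ poetry run black filecloudapi tests
+$ poetry run mypy filecloudapi tests
+$ poetry run pylint filecloudapi tests --rcfile=.pylint.ini
+$ poetry run pydocstyle filecloudapi tests
+```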
+ +### Continuous Integration + +The CI server will report overall build status: + +```text +$ make all +``` + +## Demo Tasks + +Run the program: + +```text +$ make run +``` + +Launch an IPython session: + +```text +$ make shell +``` + +## Release Tasks + +Release to PyPI: + +```text +$ make upload +``` diff --git a/LICENSE b/LICENSE.md similarity index 100% rename from LICENSE rename to LICENSE.md diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..eee9431 --- /dev/null +++ b/Makefile @@ -0,0 +1,200 @@ +PROJECT := filecloudapi-python +PACKAGE := filecloudapi +MODULES := $(wildcard $(PACKAGE)/*.py) + +# MAIN TASKS ################################################################## + +.PHONY: all +all: doctor format check test mkdocs ## Run all tasks that determine CI status + +.PHONY: dev +dev: install ## Continuously run CI tasks when files chanage + poetry run sniffer + +# SYSTEM DEPENDENCIES ######################################################### + +.PHONY: bootstrap +bootstrap: ## Attempt to install system dependencies + asdf plugin add python || asdf plugin update python + asdf plugin add poetry || asdf plugin update poetry + asdf install + +.PHONY: doctor +doctor: ## Confirm system dependencies are available + bin/verchew + +# PROJECT DEPENDENCIES ######################################################## + +VIRTUAL_ENV ?= .venv +DEPENDENCIES := $(VIRTUAL_ENV)/.poetry-$(shell bin/checksum pyproject.toml poetry.lock) + +.PHONY: install +install: $(DEPENDENCIES) .cache ## Install project dependencies + +$(DEPENDENCIES): poetry.lock + @ rm -rf $(VIRTUAL_ENV)/.poetry-* + @ rm -rf ~/Library/Preferences/pypoetry + @ poetry config virtualenvs.in-project true + poetry install + @ touch $@ + +ifndef CI +poetry.lock: pyproject.toml + poetry lock --no-update + @ touch $@ +endif + +.cache: + @ mkdir -p .cache + +.PHONY: clean +clean: ## Delete all generated and temporary files + find $(PACKAGE) tests -name '__pycache__' -delete + rm -rf *.egg-info + rm -rf .cache .pytest .coverage htmlcov + rm -rf docs/*.png site + rm -rf *.spec dist build + rm -rf $(VIRTUAL_ENV) + +# TEST ######################################################################## + +RANDOM_SEED ?= $(shell date +%s) +FAILURES := .cache/pytest/v/cache/lastfailed + +PYTEST_OPTIONS := --random --random-seed=$(RANDOM_SEED) +ifndef DISABLE_COVERAGE +PYTEST_OPTIONS += --cov=$(PACKAGE) +endif +ifdef CI +PYTEST_OPTIONS += --cov-report=xml +endif +PYTEST_RERUN_OPTIONS := --last-failed --exitfirst + +.PHONY: test +test: test-all ## Run unit and integration tests + +.PHONY: test-unit +test-unit: install + @ ( mv $(FAILURES) $(FAILURES).bak || true ) > /dev/null 2>&1 + poetry run pytest $(PACKAGE) $(PYTEST_OPTIONS) + @ ( mv $(FAILURES).bak $(FAILURES) || true ) > /dev/null 2>&1 +ifndef DISABLE_COVERAGE + poetry run coveragespace update unit +endif + +.PHONY: test-int +test-int: install + @ if test -e $(FAILURES); then poetry run pytest tests $(PYTEST_RERUN_OPTIONS); fi + @ rm -rf $(FAILURES) + poetry run pytest tests $(PYTEST_OPTIONS) +ifndef DISABLE_COVERAGE + poetry run coveragespace update integration +endif + +.PHONY: test-all +test-all: install + @ if test -e $(FAILURES); then poetry run pytest $(PACKAGE) tests $(PYTEST_RERUN_OPTIONS); fi + @ rm -rf $(FAILURES) + poetry run pytest $(PACKAGE) tests $(PYTEST_OPTIONS) +ifndef DISABLE_COVERAGE + poetry run coveragespace update overall +endif + +.PHONY: read-coverage +read-coverage: + bin/open htmlcov/index.html + +# CHECK 
####################################################################### + +.PHONY: format +format: install + poetry run isort $(PACKAGE) tests notebooks + poetry run black $(PACKAGE) tests notebooks + @ echo + +.PHONY: check +check: install format ## Run formaters, linters, and static analysis +ifdef CI + git diff --exit-code +endif + poetry run mypy $(PACKAGE) tests + poetry run pylint $(PACKAGE) tests --rcfile=.pylint.ini + poetry run pydocstyle $(PACKAGE) tests + +# DOCUMENTATION ############################################################### + +MKDOCS_INDEX := site/index.html + +.PHONY: docs +docs: mkdocs uml ## Generate documentation and UML +ifndef CI + @ eval "sleep 3; bin/open http://127.0.0.1:8000" & + poetry run mkdocs serve +endif + +.PHONY: mkdocs +mkdocs: install $(MKDOCS_INDEX) +$(MKDOCS_INDEX): docs/requirements.txt mkdocs.yml docs/*.md + @ mkdir -p docs/about + @ cd docs && ln -sf ../README.md index.md + @ cd docs/about && ln -sf ../../CHANGELOG.md changelog.md + @ cd docs/about && ln -sf ../../CONTRIBUTING.md contributing.md + @ cd docs/about && ln -sf ../../LICENSE.md license.md + poetry run mkdocs build --clean --strict + +docs/requirements.txt: poetry.lock + @ poetry export --with dev --without-hashes | grep mkdocs > $@ + @ poetry export --with dev --without-hashes | grep pygments >> $@ + @ poetry export --with dev --without-hashes | grep jinja2 >> $@ + +.PHONY: uml +uml: install docs/*.png +docs/*.png: $(MODULES) + poetry run pyreverse $(PACKAGE) -p $(PACKAGE) -a 1 -f ALL -o png --ignore tests + - mv -f classes_$(PACKAGE).png docs/classes.png + - mv -f packages_$(PACKAGE).png docs/packages.png + +# DEMO ######################################################################## + +.PHONY: run +run: install ## Start the program + poetry run python $(PACKAGE)/__main__.py + +.PHONY: shell +shell: install ## Launch an IPython session + poetry run ipython --ipython-dir=notebooks + +# BUILD ####################################################################### + +DIST_FILES := dist/*.tar.gz dist/*.whl +EXE_FILES := dist/$(PACKAGE).* + +.PHONY: dist +dist: install $(DIST_FILES) +$(DIST_FILES): $(MODULES) pyproject.toml + rm -f $(DIST_FILES) + poetry build + +.PHONY: exe +exe: install $(EXE_FILES) +$(EXE_FILES): $(MODULES) $(PACKAGE).spec + poetry run pyinstaller $(PACKAGE).spec --noconfirm --clean + +$(PACKAGE).spec: + poetry run pyi-makespec $(PACKAGE)/__main__.py --onefile --windowed --name=$(PACKAGE) + +# RELEASE ##################################################################### + +.PHONY: upload +upload: dist ## Upload the current version to PyPI + git diff --name-only --exit-code + poetry publish + bin/open https://pypi.org/project/$(PROJECT) + +# HELP ######################################################################## + +.PHONY: help +help: install + @ grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + +.DEFAULT_GOAL := help diff --git a/README.md b/README.md index 5d8628e..17bc89d 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,44 @@ -# filecloudapi-python -FileCloud API for Python +# Overview + +A Python library to connect to a Filecloud server + +This project was generated with [cookiecutter](https://github.com/audreyr/cookiecutter) using [jacebrowning/template-python](https://github.com/jacebrowning/template-python). 
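+
+A minimal usage sketch (the host name and credentials below are illustrative, and `upload_file` is the same call the bundled CLI uses; see `filecloudapi.fcserver.FCServer` for the full set of options):
+
+```python
+from pathlib import Path
+
+from filecloudapi import FCServer
+
+# Logging in happens on construction (username/password authentication)
+server = FCServer("https://fc.example.com", email=None,
+                  username="jane", password="secret")
+
+# Upload a local file to a remote path on the server
+server.upload_file(Path("report.pdf"), "/jane/reports/report.pdf")
+```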
+ +[![Linux Build](https://img.shields.io/github/actions/workflow/status/codelathe/template-python-demo/main.yml?branch=main&label=linux)](https://github.com/codelathe/filecloudapi-python/actions) +[![Windows Build](https://img.shields.io/appveyor/ci/codelathe/template-python-demo/main.svg?label=windows)](https://ci.appveyor.com/project/codelathe/filecloudapi-python) +[![Code Coverage](https://img.shields.io/codecov/c/github/codelathe/filecloudapi-python) +](https://codecov.io/gh/codelathe/filecloudapi-python) +[![Code Quality](https://img.shields.io/scrutinizer/g/codelathe/filecloudapi-python.svg?label=quality)](https://scrutinizer-ci.com/g/codelathe/filecloudapi-python/?branch=main) +[![PyPI License](https://img.shields.io/pypi/l/filecloudapi-python.svg)](https://pypi.org/project/filecloudapi-python) +[![PyPI Version](https://img.shields.io/pypi/v/filecloudapi-python.svg?label=version)](https://pypi.org/project/filecloudapi-python) +[![PyPI Downloads](https://img.shields.io/pypi/dm/filecloudapi-python.svg?color=orange)](https://pypistats.org/packages/filecloudapi-python) + +## Setup + +### Requirements + +* Python 3.11+ + +### Installation + +Install it directly into an activated virtual environment: + +```text +$ pip install filecloudapi-python +``` + +or add it to your [Poetry](https://poetry.eustace.io/) project: + +```text +$ poetry add filecloudapi-python +``` + +## Usage + +After installation, the package can be imported: + +```text +$ python +>>> import filecloudapi +>>> filecloudapi.__version__ +``` diff --git a/bin/checksum b/bin/checksum new file mode 100644 index 0000000..f38bcd6 --- /dev/null +++ b/bin/checksum @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import hashlib +import sys + + +def run(paths): + sha = hashlib.sha1() + + for path in paths: + try: + with open(path, 'rb') as f: + for chunk in iter(lambda: f.read(4096), b''): + sha.update(chunk) + except IOError: + sha.update(path.encode()) + + print(sha.hexdigest()) + + +if __name__ == '__main__': + run(sys.argv[1:]) diff --git a/bin/open b/bin/open new file mode 100644 index 0000000..f7ae38a --- /dev/null +++ b/bin/open @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sys + + +COMMANDS = { + 'linux': "open", + 'win32': "cmd /c start", + 'cygwin': "cygstart", + 'darwin': "open", +} + + +def run(path): + command = COMMANDS.get(sys.platform, "open") + os.system(command + ' ' + path) + + +if __name__ == '__main__': + run(sys.argv[-1]) diff --git a/bin/update b/bin/update new file mode 100644 index 0000000..fcf3959 --- /dev/null +++ b/bin/update @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +from contextlib import suppress +import importlib +import tempfile +import shutil +import subprocess +import sys + + +CWD = os.getcwd() +TMP = tempfile.gettempdir() +CONFIG = { + "full_name": "FileCloud", + "email": "dev@filecloud.com", + "github_username": "codelathe", + "github_repo": "filecloudapi-python", + "default_branch": "main", + "project_name": "filecloudapi-python", + "package_name": "filecloudapi", + "project_short_description": "A Python library to connect to a Filecloud server", + "python_major_version": 3, + "python_minor_version": 11, +} + + +def install(package="cookiecutter"): + try: + importlib.import_module(package) + except ImportError: + print("Installing cookiecutter") + subprocess.check_call([sys.executable, "-m", "pip", "install", package]) + + +def run(): + print("Generating project") + + from cookiecutter.main import 
cookiecutter + + os.chdir(TMP) + cookiecutter( + "https://github.com/jacebrowning/template-python.git", + no_input=True, + overwrite_if_exists=True, + extra_context=CONFIG, + ) + + +def copy(): + for filename in [ + os.path.join("bin", "update"), + os.path.join("bin", "checksum"), + os.path.join("bin", "open"), + os.path.join("bin", "verchew"), + ".appveyor.yml", + ".coveragerc", + ".gitattributes", + ".gitignore", + ".pydocstyle.ini", + ".pylint.ini", + ".scrutinizer.yml", + ".tool-versions", + ".verchew.ini", + "CONTRIBUTING.md", + "Makefile", + "scent.py", + ]: + src = os.path.join(TMP, CONFIG["project_name"], filename) + dst = os.path.join(CWD, filename) + print("Updating " + filename) + with suppress(FileNotFoundError): + shutil.copy(src, dst) + + +if __name__ == "__main__": + install() + run() + copy() diff --git a/bin/verchew b/bin/verchew new file mode 100644 index 0000000..5061561 --- /dev/null +++ b/bin/verchew @@ -0,0 +1,389 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# The MIT License (MIT) +# Copyright © 2016, Jace Browning +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +# Source: https://github.com/jacebrowning/verchew +# Documentation: https://verchew.readthedocs.io +# Package: https://pypi.org/project/verchew + + +from __future__ import unicode_literals + +import argparse +import logging +import os +import re +import sys +from collections import OrderedDict +from subprocess import PIPE, STDOUT, Popen + +PY2 = sys.version_info[0] == 2 + +if PY2: + import ConfigParser as configparser + from urllib import urlretrieve +else: + import configparser + from urllib.request import urlretrieve + +__version__ = "3.4.2" + +SCRIPT_URL = ( + "https://raw.githubusercontent.com/jacebrowning/verchew/main/verchew/script.py" +) +WRAPPER_URL = ( + "https://raw.githubusercontent.com/jacebrowning/verchew/main/verchew/wrapper.sh" +) + +CONFIG_FILENAMES = ["verchew.ini", ".verchew.ini", ".verchewrc", ".verchew"] + +SAMPLE_CONFIG = """ +[Python] + +cli = python +version = Python 3.5 || Python 3.6 + +[Legacy Python] + +cli = python2 +version = Python 2.7 + +[virtualenv] + +cli = virtualenv +version = 15 +message = Only required with Python 2. 
+ +[Make] + +cli = make +version = GNU Make +optional = true + +""".strip() + +STYLE = { + "~": "✔", + "?": "▴", + "x": "✘", + "#": "䷉", +} + +COLOR = { + "~": "\033[92m", # green + "?": "\033[93m", # yellow + "x": "\033[91m", # red + "#": "\033[96m", # cyan + None: "\033[0m", # reset +} + +QUIET = False + +log = logging.getLogger(__name__) + + +def main(): + global QUIET + + args = parse_args() + configure_logging(args.verbose) + if args.quiet: + QUIET = True + + log.debug("PWD: %s", os.getenv("PWD")) + log.debug("PATH: %s", os.getenv("PATH")) + + if args.vendor: + vendor_script(SCRIPT_URL, args.vendor) + vendor_script(WRAPPER_URL, args.vendor + "-wrapper") + sys.exit(0) + + path = find_config(args.root, generate=args.init) + config = parse_config(path) + + if not check_dependencies(config) and args.exit_code: + sys.exit(1) + + +def parse_args(): + parser = argparse.ArgumentParser(description="System dependency version checker.") + + version = "%(prog)s v" + __version__ + parser.add_argument("--version", action="version", version=version) + parser.add_argument( + "-r", "--root", metavar="PATH", help="specify a custom project root directory" + ) + parser.add_argument( + "--exit-code", + action="store_true", + help="return a non-zero exit code on failure", + ) + + group_logging = parser.add_mutually_exclusive_group() + group_logging.add_argument( + "-v", "--verbose", action="count", default=0, help="enable verbose logging" + ) + group_logging.add_argument( + "-q", "--quiet", action="store_true", help="suppress all output on success" + ) + + group_commands = parser.add_argument_group("commands") + group_commands.add_argument( + "--init", action="store_true", help="generate a sample configuration file" + ) + + group_commands.add_argument( + "--vendor", metavar="PATH", help="download the program for offline use" + ) + + args = parser.parse_args() + + return args + + +def configure_logging(count=0): + if count == 0: + level = logging.WARNING + elif count == 1: + level = logging.INFO + else: + level = logging.DEBUG + + logging.basicConfig(level=level, format="%(levelname)s: %(message)s") + + +def vendor_script(url, path): + root = os.path.abspath(os.path.join(path, os.pardir)) + if not os.path.isdir(root): + log.info("Creating directory %s", root) + os.makedirs(root) + + log.info("Downloading %s to %s", url, path) + urlretrieve(url, path) + + log.debug("Making %s executable", path) + mode = os.stat(path).st_mode + os.chmod(path, mode | 0o111) + + +def find_config(root=None, filenames=None, generate=False): + root = root or os.getcwd() + filenames = filenames or CONFIG_FILENAMES + + path = None + log.info("Looking for config file in: %s", root) + log.debug("Filename options: %s", ", ".join(filenames)) + for filename in os.listdir(root): + if filename in filenames: + path = os.path.join(root, filename) + log.info("Found config file: %s", path) + return path + + if generate: + path = generate_config(root, filenames) + return path + + msg = "No config file found in: {0}".format(root) + raise RuntimeError(msg) + + +def generate_config(root=None, filenames=None): + root = root or os.getcwd() + filenames = filenames or CONFIG_FILENAMES + + path = os.path.join(root, filenames[0]) + + log.info("Generating sample config: %s", path) + with open(path, "w") as config: + config.write(SAMPLE_CONFIG + "\n") + + return path + + +def parse_config(path): + data = OrderedDict() # type: ignore + + log.info("Parsing config file: %s", path) + config = configparser.ConfigParser() + config.read(path) + + for section in 
config.sections(): + data[section] = OrderedDict() + for name, value in config.items(section): + data[section][name] = value + + for name in data: + version = data[name].get("version") or "" + data[name]["version"] = version + data[name]["patterns"] = [v.strip() for v in version.split("||")] + + data[name]["optional"] = data[name].get( + "optional", "false" + ).strip().lower() in ("true", "yes", "y", True) + + return data + + +def check_dependencies(config): + success = [] + + for name, settings in config.items(): + show("Checking for {0}...".format(name), head=True) + output = get_version(settings["cli"], settings.get("cli_version_arg")) + + for pattern in settings["patterns"]: + if match_version(pattern, output): + show(_("~") + " MATCHED: {0}".format(pattern or "")) + success.append(_("~")) + break + else: + if settings.get("optional"): + show(_("?") + " EXPECTED (OPTIONAL): {0}".format(settings["version"])) + success.append(_("?")) + else: + if QUIET: + if "not found" in output: + actual = "Not found" + else: + actual = output.split("\n", maxsplit=1)[0].strip(".") + expected = settings["version"] or "" + print("{0}: {1}, EXPECTED: {2}".format(name, actual, expected)) + show( + _("x") + + " EXPECTED: {0}".format(settings["version"] or "") + ) + success.append(_("x")) + if settings.get("message"): + show(_("#") + " MESSAGE: {0}".format(settings["message"])) + + show("Results: " + " ".join(success), head=True) + + return _("x") not in success + + +def get_version(program, argument=None): + if argument is None: + args = [program, "--version"] + elif argument: + args = [program] + argument.split() + else: + args = [program] + + show("$ {0}".format(" ".join(args))) + output = call(args) + lines = output.splitlines() + + if lines: + for line in lines: + if any(char.isdigit() for char in line): + show(line) + break + else: + show(lines[0]) + else: + show("") + + return output + + +def match_version(pattern, output): + lines = output.splitlines() + if not lines: + return False + if "not found" in lines[0]: + return False + if re.match(r"No .+ executable found", " ".join(lines)): + return False + + regex = pattern.replace(".", r"\.") + r"(\b|/)" + + for line in lines: + log.debug("Matching %s: %s", regex, line) + match = re.match(regex, line) + if match is None: + log.debug("Matching %s: %s", regex, line) + match = re.match(r".*[^\d.]" + regex, line) + if match: + return True + + return False + + +def call(args): + try: + process = Popen(args, stdout=PIPE, stderr=STDOUT) + except OSError: + log.debug("Command not found: %s", args[0]) + output = "sh: command not found: {0}".format(args[0]) + else: + raw = process.communicate()[0] + output = raw.decode("utf-8").strip() + log.debug("Command output: %r", output) + + return output + + +def show(text, start="", end="\n", head=False): + """Python 2 and 3 compatible version of print.""" + if QUIET: + return + + if head: + start = "\n" + end = "\n\n" + + if log.getEffectiveLevel() < logging.WARNING: + log.info(text) + else: + formatted = start + text + end + if PY2: + formatted = formatted.encode("utf-8") + sys.stdout.write(formatted) + sys.stdout.flush() + + +def _(word, is_tty=None, supports_utf8=None, supports_ansi=None): + """Format and colorize a word based on available encoding.""" + formatted = word + + if is_tty is None: + is_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() + if supports_utf8 is None: + supports_utf8 = str(sys.stdout.encoding).lower() == "utf-8" + if supports_ansi is None: + supports_ansi = sys.platform != "win32" or 
"ANSICON" in os.environ + + style_support = supports_utf8 + color_support = is_tty and supports_ansi + + if style_support: + formatted = STYLE.get(word, word) + + if color_support and COLOR.get(word): + formatted = COLOR[word] + formatted + COLOR[None] + + return formatted + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/bin/verchew-wrapper b/bin/verchew-wrapper new file mode 100644 index 0000000..d494c43 --- /dev/null +++ b/bin/verchew-wrapper @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# -*- coding: utf-8 -*- +# +# See `verchew` for licensing and documentation links. + +set -e +set -o pipefail + +BIN=$(dirname $(realpath $0)) + +if [ -e "${BIN}/verchew" ]; then + python3 "${BIN}/verchew" "$@" +elif [ -e "${BIN}/script.py" ]; then + python3 "${BIN}/script.py" "$@" +else + echo "ERROR: 'verchew' script is missing, run 'verchew --vendor' again" + exit 1 +fi diff --git a/docs/about/changelog.md b/docs/about/changelog.md new file mode 100644 index 0000000..699cc9e --- /dev/null +++ b/docs/about/changelog.md @@ -0,0 +1 @@ +../../CHANGELOG.md \ No newline at end of file diff --git a/docs/about/contributing.md b/docs/about/contributing.md new file mode 100644 index 0000000..f939e75 --- /dev/null +++ b/docs/about/contributing.md @@ -0,0 +1 @@ +../../CONTRIBUTING.md \ No newline at end of file diff --git a/docs/about/license.md b/docs/about/license.md new file mode 100644 index 0000000..f0608a6 --- /dev/null +++ b/docs/about/license.md @@ -0,0 +1 @@ +../../LICENSE.md \ No newline at end of file diff --git a/docs/advanced.md b/docs/advanced.md new file mode 100644 index 0000000..1eb4cef --- /dev/null +++ b/docs/advanced.md @@ -0,0 +1,7 @@ +# Advanced + +## Feature A + +## Feature B + +## Feature C diff --git a/filecloudapi/__init__.py b/filecloudapi/__init__.py new file mode 100644 index 0000000..700cfe8 --- /dev/null +++ b/filecloudapi/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +from .fcserver import FCServer +from .exceptions import ServerError +from .datastructures import EntryType, FileList, FileListEntry + +__ALL__ = [ + "FCServer", + "ServerError", + "EntryType", + "FileList", + "FileListEntry", +] diff --git a/filecloudapi/__main__.py b/filecloudapi/__main__.py new file mode 100644 index 0000000..1ce21fa --- /dev/null +++ b/filecloudapi/__main__.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python +# Copyright (c) 2024 FileCloud. All Rights Reserved. + +"""Package entry point.""" + + +from filecloudapi.cli import cli + + +if __name__ == '__main__': # pragma: no cover + cli() # pylint: disable=no-value-for-parameter diff --git a/filecloudapi/cli.py b/filecloudapi/cli.py new file mode 100644 index 0000000..d9675d6 --- /dev/null +++ b/filecloudapi/cli.py @@ -0,0 +1,54 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +""" +This module provides a command-line interface (CLI) for interacting with the FileCloud API. +Commands: + upload-file: Uploads a file to the specified remote location on the FileCloud server. +Usage: + To use the CLI, run the script and follow the prompts for username, password, and server URL. + Example: + python cli.py upload-file + The `upload-file` command requires the following arguments: + local: The path to the local file to be uploaded. + remote: The remote path where the file should be uploaded on the FileCloud server. + Global options: + --username: Username for authentication. + --password: Password for authentication (can be set via the FILECLOUD_PASSWORD environment variable). 
+ --server-url: URL of the FileCloud server. +""" + +from dataclasses import dataclass +from pathlib import Path +import click +import os + +from filecloudapi.fcserver import FCServer + +@dataclass +class ServerConfig: + server_url: str + username: str + password: str + +@click.option('-s', '--server-url', prompt=True, hide_input=False, help='URL of the FileCloud server') +@click.option('-u', '--username', prompt=True, hide_input=False, help='Username for authentication') +@click.option('-p', '--password', prompt=True, hide_input=True, default=lambda: os.environ.get('FILECLOUD_PASSWORD', ''), help='Password for authentication') +@click.group() +@click.pass_context +def cli(ctx, server_url: str, username: str, password: str): + ctx.obj = ServerConfig(server_url, username, password) + pass + +def create_fcserver(config: ServerConfig) -> FCServer: + return FCServer(config.server_url, email=None, username=config.username, password=config.password) + +@cli.command() +@click.option('-l', '--local', type=Path, required=True, help='Path to the local file to be uploaded') +@click.option('-r', '--remote', type=str, required=True, help='Remote path where the file should be uploaded on the FileCloud server') +@click.pass_obj +def upload_file(config: ServerConfig, local: Path, remote: str): + fcserver = create_fcserver(config) + fcserver.upload_file(local, remote) + + +if __name__ == '__main__': # pragma: no cover + cli() diff --git a/filecloudapi/datastructures.py b/filecloudapi/datastructures.py new file mode 100644 index 0000000..daa20a4 --- /dev/null +++ b/filecloudapi/datastructures.py @@ -0,0 +1,293 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +from dataclasses import dataclass +from enum import Enum +from typing import Optional +from xml.etree.ElementTree import Element + + +class EntryType(Enum): + dir = "dir" + file = "file" + + +class SharedType(Enum): + notshared = "" + private = "private" + public = "public" + + +class AclEntryType(Enum): + user = "user" + + +@dataclass +class FileListEntry: + path: str + dirpath: str + name: str + ext: str + fullsize: int + modified: str + type: EntryType + fullfilename: str + size: str + modifiedepoch: str + isroot: bool + locked: bool + isshared: SharedType + modifiedepochutc: str + canupload: bool + candownload: bool + canrename: bool + cansetacls: bool + isshareable: bool + issyncable: bool + isdatasyncable: bool + + +@dataclass +class FileVersion: + versionnumber: str + size: str + how: str + createdon: str + createdby: str + filename: str + sizeinbytes: str + fileid: str + + +@dataclass +class FileList: + parentpath: str + total: int + realpath: str + isroot: bool + entries: list[FileListEntry] + + +class SortBy(Enum): + NAME = "name" + DATE = "date" + SIZE = "size" + + +class SortDir(Enum): + ascending = 1 + descending = -1 + + +@dataclass +class FCShare: + shareid: str + sharename: str + sharelocation: str + allowpublicaccess: bool + allowpublicupload: bool + allowpublicviewonly: bool + allowpublicuploadonly: bool + maxdownloads: Optional[int] = 0 + validityperiod: Optional[str] = "" + + +@dataclass +class FCShareUser: + name: str + read: bool + write: bool + sync: bool + share: bool + download: bool + disallowdelete: bool + allowmanage: bool + + def __getitem__(self, key): + if key in self.__annotations__: + return getattr(self, key) + else: + raise KeyError(f"'{key}' property not found") + + +@dataclass +class FCShareGroup: + groupid: str + groupname: str + read: bool + write: bool + sync: bool + share: bool + download: bool + 
disallowdelete: bool + + def __getitem__(self, key): + if key in self.__annotations__: + return getattr(self, key) + else: + raise KeyError(f"'{key}' property not found") + + +@dataclass +class ShareActivity: + shareid: str + path: str + name: str + actioncode: int + who: str + when: str + how: str + ip: str + + +@dataclass +class FileLockInfo: + locked: bool + readlock: bool + lockedby: str + + +@dataclass +class TeamFolderInfo: + teamfolderenabled: bool + teamfolderaccount: str + aclenabled: bool + teamfolderpath: Optional[str] = None + + +@dataclass +class NetworkFolderInfo: + networkfoldername: str + + +@dataclass +class RMCClient: + rid: str + remote_client_id: str + remote_client_disp_name: str + remote_client_last_login: str + remote_client_status: int + remote_client_status_message: str + + +@dataclass +class PolicyUser: + username: str + status: int + adminstatus: int + authtype: int + + +@dataclass +class SyncFolder: + path: str + update_version: int + + +@dataclass +class SyncDeltaItem: + type: EntryType + size: int + modified: str + name: str + fullpath: str + flags: str + isdeleted: bool + updateversion: int + candownload: bool + canupload: bool + canrename: bool + + +@dataclass +class AclPermissions: + has_read_permission: bool + has_write_permssion: bool + has_share_permission: bool + has_delete_permission: bool + + def __init__(self, fromstr: str): + self.has_read_permission = False + self.has_write_permssion = False + self.has_share_permission = False + self.has_delete_permission = False + if "R" in fromstr: + self.has_read_permission = True + if "W" in fromstr: + self.has_write_permssion = True + if "S" in fromstr: + self.has_share_permission = True + if "D" in fromstr: + self.has_delete_permission = True + + def __str__(self) -> str: + ret = "" + if self.has_read_permission: + ret += "R" + if self.has_write_permssion: + ret += "W" + if self.has_share_permission: + ret += "S" + if self.has_delete_permission: + ret += "D" + return ret + + +class UserStatus(Enum): + GUEST = 0 + FULL = 1 + DISABLED = 2 + EXTERNAL = 3 + UNKNOWN = -1 + + +@dataclass +class PolicyEntry: + """Represents a policy entry""" + + policyid: str + policyname: str + + +class PolicyList: + """Convienience class represents policy list""" + + entries: list[PolicyEntry] = [] + + def first(self) -> PolicyEntry | None: + if len(self.entries) >= 1: + return self.entries[0] + return None + + def __iter__(self): + return iter(self.entries) + + def __init__(self, a_response: Element): + """""" + self._set_entries(response=a_response) + + def _set_entries(self, response: Element): + a_list = list(response) + + for elem in a_list: + if elem.tag != "policy": + continue + + an_entry = PolicyEntry( + policyid=list(elem)[0].text, # type:ignore + policyname=list(elem)[1].text, # type:ignore + ) + self.entries.append(an_entry) + + +@dataclass +class StorageRootDetails: + type: str + name: str + + +@dataclass +class ProfileSettings: + nickname: str + peerid: str + displayname: str + email: str + isremote: int diff --git a/filecloudapi/exceptions.py b/filecloudapi/exceptions.py new file mode 100644 index 0000000..b3b3993 --- /dev/null +++ b/filecloudapi/exceptions.py @@ -0,0 +1,14 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. 
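+"""Exceptions raised by the FileCloud API client."""
+
+# Illustrative usage (assumes an authenticated ``FCServer`` instance named
+# ``server``; the call and paths are hypothetical):
+#
+#     try:
+#         server.upload_file(Path("report.pdf"), "/jane/reports/report.pdf")
+#     except ServerError as err:
+#         print(f"FileCloud error {err.code}: {err}")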
+ + +class ServerError(Exception): + """ + Generic Error with FileCloud server (connection) + """ + + def __init__(self, code: str, message: str): + """ + Initialize the exception with code and message + """ + super().__init__(message) + self.code = code diff --git a/filecloudapi/fcserver.py b/filecloudapi/fcserver.py new file mode 100644 index 0000000..de46c88 --- /dev/null +++ b/filecloudapi/fcserver.py @@ -0,0 +1,2133 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +from io import SEEK_CUR, SEEK_END, SEEK_SET, BufferedReader, BytesIO +from typing import Dict +import requests +import logging +import xml.etree.ElementTree as ET +from typing import Union, Optional +import pathlib +import time +from urllib.parse import urlencode +import datetime +from pathlib import Path +from urllib3.filepost import RequestField, encode_multipart_formdata + +from requests.adapters import HTTPAdapter +from urllib3 import Retry + +from .datastructures import ( + AclEntryType, + AclPermissions, + EntryType, + FileListEntry, + FileVersion, + FileList, + FCShare, + FileLockInfo, + SharedType, + FCShareUser, + SyncDeltaItem, + SyncFolder, + TeamFolderInfo, + NetworkFolderInfo, + RMCClient, + PolicyUser, + SortBy, + SortDir, + UserStatus, + PolicyList, + PolicyEntry, + FCShareGroup, + ShareActivity, +) +from .exceptions import ServerError +import re + + +def str_to_bool(value): + return value.lower() in ("true", "1", "yes") + + +log = logging.getLogger(__name__) + + +class FCServer: + """ + FileCloud Server API + """ + + def __init__( + self, + url: str, + email: Optional[str], + username: str, + password: str, + adminlogin: bool = False, + signinusingusername: bool = True, + twofakeyfun=None, + withretries: bool = True, + login: bool = True, + ) -> None: + self.email = email + self.username = username + self.password = password + self.signinusingusername = signinusingusername + self.twofakeyfun = twofakeyfun + self.url = url + self.adminlogin = adminlogin + self.session = requests.session() + + if withretries: + self.retries = Retry(total=10, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504]) # type: ignore + else: + self.retries = None + + self.session.mount("http://", HTTPAdapter(max_retries=self.retries)) + self.session.mount("https://", HTTPAdapter(max_retries=self.retries)) + + # Trim trailing slashes, FileCloud generates an error if the URL starts with // + if self.url[-1] == "/": + self.url = self.url[:-1] + + if login: + if adminlogin: + self.login_as_admin() + else: + self.login() + + def _api_call(self, method: str, params: Dict) -> ET.Element: + """ + Perform a FC API call (post) + """ + resp = self.session.post(self.url + method, data=params) + resp.raise_for_status() + self.last_headers = resp.headers + return ET.fromstring(resp.content) + + def _api_call_raw(self, method: str, params: Dict) -> str: + """ + Perform a FC API call (post) and return raw string for i.e. getuploadform + """ + resp = self.session.post(self.url + method, data=params) + resp.raise_for_status() + self.last_headers = resp.headers + return str(resp.content) + + def _admin_api_call_setlicense( + self, method: str, params: Dict, files: Optional[Dict] = None + ) -> bool: + """ + Perform a FC API call (post) to save the license. The response is either OK or Invalid xml. 
+ """ + resp = self.session.post(self.url + method, data=params, files=files) + resp.raise_for_status() + self.last_headers = resp.headers + + return str(resp.text) == "OK" + + def _extract_server_error_code(self, msg: str) -> str: + code = re.search(r"CLFC-\d+(?:-\d+)?", msg) + if code: + return code.group() + else: + return "" + + def _raise_exception_from_server_message(self, msg: str) -> None: + """ + Raise ServerError from server message + """ + raise ServerError(self._extract_server_error_code(msg), msg) + + def _raise_exception_from_command(self, resp: ET.Element): + """ + Raise Server Error from command response if result is not 1 + """ + result = resp.findtext("./command/result", "0") + + if int(result) != 1: + self._raise_exception_from_server_message(resp.findtext("./command/message", "")) + + def login(self) -> None: + """ + Try to login to FC server with the credentials + provided at init + """ + resp = self._api_call( + "/core/loginguest", + { + "userid": self.username if self.signinusingusername else self.email, + "password": self.password, + }, + ) + + res = int(resp.findtext("./command/result", "0")) + + if res == 3: + # 2FA + token = resp.findtext("./command/message", "") + if self.twofakeyfun is None: + raise ValueError("2FA required but no twofakeyfun provided") + + code: str = self.twofakeyfun() + resp = self._api_call( + "/core/2falogin", + { + "userid": self.username, + "code": code, + "token": token, + "password": self.password, + }, + ) + + ok = int(resp.findtext("./command/result", "0")) == 1 + + if ok: + # We need a new code for the next login + while code == self.twofakeyfun(): + time.sleep(1) + + self._raise_exception_from_command(resp) + + def login_as_admin(self) -> None: + """ + Try to login to FC server admin portal with the credentials + provided at init + """ + resp = self._api_call( + "/admin/adminlogin", + { + "adminuser": self.username if self.signinusingusername else self.email, + "adminpassword": self.password, + }, + ) + + res = int(resp.findtext("./command/result", "0")) + + if res == 3: + # 2FA + token = resp.findtext("./command/message", "") + if self.twofakeyfun is None: + raise ValueError("2FA required but no twofakeyfun provided") + code = self.twofakeyfun() + resp = self._api_call( + "/core/2falogin", + { + "userid": self.username, + "code": code, + "token": token, + "password": self.password, + }, + ) + + ok = int(resp.findtext("./command/result", "0")) == 1 + + if ok: + # We need a new code for the next login + while code == self.twofakeyfun(): + time.sleep(1) + + self._raise_exception_from_command(resp) + + def _parseEntry(self, entry: ET.Element) -> FileListEntry: + """ + Parse a file entry (e.g. 
returned by getfilelist) + """ + fullsize_str = entry.findtext("./fullsize") + + if fullsize_str == None: + raise ValueError("fullsize in file entry is None in server response") + + fullsize = 0 + if len(fullsize_str) > 0: + fullsize = int(fullsize_str) + + def bool_opt(txt: Optional[str]) -> bool: + if txt is not None and len(txt) > 0: + return int(txt) > 0 + else: + return False + + def shared_opt(txt: Optional[str]) -> SharedType: + if txt is not None: + if txt == "": + return SharedType.notshared + elif txt == "public": + return SharedType.public + elif txt == "private": + return SharedType.private + else: + assert txt == "" + return SharedType.notshared + else: + return SharedType.notshared + + return FileListEntry( + entry.findtext("./path", ""), + entry.findtext("./dirpath", ""), + entry.findtext("./name", ""), + entry.findtext("./ext", ""), + fullsize, + entry.findtext("./modified", ""), + EntryType(entry.findtext("./type", EntryType.file.value)), + entry.findtext("./fullfilename", ""), + entry.findtext("./size", ""), + entry.findtext("./modifiedepoch", ""), + bool_opt(entry.findtext("./isroot")), + bool_opt(entry.findtext("./locked")), + shared_opt(entry.findtext("./isshared")), + entry.findtext("./modifiedepochutc", ""), + entry.findtext("./canupload", "1") == "1", + entry.findtext("./candownload", "1") == "1", + entry.findtext("./canrename", "1") == "1", + entry.findtext("./cansetacls", "0") == "1", + entry.findtext("./isshareable", "1") == "1", + entry.findtext("./issyncable", "1") == "1", + entry.findtext("./isdatasyncable", "1") == "1", + ) + + def getfilelist( + self, + path: str, + sortdir: SortDir = SortDir.ascending, + start: int = 0, + limit: int = 1000, + sortby: SortBy = SortBy.NAME, + adminproxyuserid: str = "", + ) -> FileList: + """ + Returns a list of files/directories in 'path' + """ + resp = self._api_call( + "/core/getfilelist", + { + "path": path, + "sortdir": sortdir.value, + "sortby": sortby.value, + "start": start, + "limit": limit, + "sendaboutinfo": 1, + "sendmetadatasetinfo": 1, + "sendcommentinfo": 1, + "sendfavinfo": 1, + "adminproxyuserid": adminproxyuserid, + "includeextrafields": 1, + }, + ) + + meta = resp.find("./meta") + if meta == None: + raise ValueError("No meta in server response") + + result = int(meta.findtext("./result", "0")) + + if result != 1: + raise ValueError("Result at /meta/result not 1 in server response") + + entries: list[FileListEntry] = [] + + def bool_opt(txt: Union[None, str]) -> bool: + if txt != None and len(txt) > 0: + return int(txt) > 0 + else: + return False + + for entry in resp.findall("./entry"): + entries.append(self._parseEntry(entry)) + + return FileList( + meta.findtext("./parentpath", ""), + int(meta.findtext("./total", 0)), + meta.findtext("./realpath", ""), + bool_opt(meta.findtext("./isroot", "")), + entries, + ) + + def fileinfo_no_retry(self, path: str) -> FileListEntry: + """ + Returns information about file/directory 'path' + """ + resp = self._api_call("/core/fileinfo", {"file": path}) + + entry = resp.find("./entry") + + if entry == None: + raise FileNotFoundError(f"File '{path}' does not exist") + + return self._parseEntry(entry) + + def admin_search( + self, + keyword="", + groupidnin="", + externalin="", + status="", + source="", + statusnin="", + start=0, + end=10, + admin="", + policyidnin="", + ) -> list[ET.Element]: + """ + Search method to get all users + """ + resp = self._api_call( + "/admin/search", + { + "op": "search", + "keyword": keyword, + "groupidnin": groupidnin, + "externalin": 
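# A sketch of listing a folder with getfilelist(), assuming `server` is a
# logged-in FCServer and the path is a placeholder:
#
#   from filecloudapi.datastructures import EntryType, SortBy, SortDir
#
#   listing = server.getfilelist(
#       "/alice/Documents",
#       sortby=SortBy.NAME,
#       sortdir=SortDir.ascending,
#       limit=100,
#   )
#   for entry in listing.entries:
#       marker = "<dir>" if entry.type == EntryType.dir else "     "
#       print(marker, entry.name, entry.fullsize)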
externalin, + "status": status, + "source": source, + "statusnin": statusnin, + "start": start, + "end": end, + "admin": admin, + "policyidnin": policyidnin, + }, + ) + entries = resp.findall("./user") + + if entries is None: + raise ValueError("No users found") + + return entries + + def fileinfo(self, path: str) -> FileListEntry: + """ + Returns information about file/directory 'path'. Retries + """ + if self.retries is None: + return self.fileinfo_no_retry(path) + + retries = self.retries.new() + while True: + try: + return self.fileinfo_no_retry(path) + except: + retries = retries.increment() + time.sleep(retries.get_backoff_time()) + + def fileversions( + self, filepath: str, filename: str, checksum: bool = True + ) -> list[FileVersion]: + """ + Get all the available previous versions of a file + """ + resp = self._api_call( + "/core/getversions", + { + "filepath": filepath, + "filename": filename, + "checksum": 1 if checksum else 0, + }, + ) + + entries: list[FileVersion] = [] + + for entry in resp.findall("./version"): + version = FileVersion( + versionnumber=entry.findtext("./versionnumber", ""), + size=entry.findtext("./size", ""), + how=entry.findtext("./how", ""), + createdon=entry.findtext("./createdon", ""), + createdby=entry.findtext("./createdby", ""), + filename=entry.findtext("./filename", ""), + sizeinbytes=entry.findtext("./sizeinbytes", ""), + fileid=entry.findtext("./fileid", ""), + ) + + entries.append(version) + + return entries + + def fileexists_no_retry(self, path: str, caseinsensitive: bool = False) -> bool: + """ + Returns True if file 'path' exists else False + """ + resp = self._api_call( + "/core/fileexists", {"file": path, "caseinsensitive": 1 if caseinsensitive else 0} + ) + + return int(resp.findtext("./command/result", "0")) == 1 + + def fileexists(self, path: str, caseinsensitive: bool = False) -> bool: + """ + Returns True if file 'path' exists else False. Retries + """ + if self.retries is None: + return self.fileexists_no_retry(path, caseinsensitive) + + retries = self.retries.new() + while True: + try: + return self.fileexists_no_retry(path, caseinsensitive) + except: + retries = retries.increment() + time.sleep(retries.get_backoff_time()) + + def waitforfile(self, path: str, maxwaits: float = 30, fsize: int = -1) -> None: + """ + Waits for file at 'path' to exists for max 'maxwaits' seconds. + If fsize != -1 also wait for file to have size fsize. 
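# A sketch combining the lookup helpers above, assuming `server` is a
# logged-in FCServer; the path and file name are placeholders:
#
#   path = "/alice/Documents/report.docx"
#   if server.fileexists(path):
#       info = server.fileinfo(path)    # retried according to self.retries
#       print(info.name, info.fullsize)
#       for version in server.fileversions("/alice/Documents", "report.docx"):
#           print(version.versionnumber, version.size, version.createdon)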
+ """ + starttime = time.monotonic() + + while time.monotonic() - starttime < maxwaits: + if self.fileexists(path): + if fsize == -1: + log.info(f"Found {path} after {(time.monotonic() - starttime)} seconds") + return + else: + info = self.fileinfo(path) + if info is not None and info.fullsize == fsize: + log.info( + f"Found {path} with size {fsize} after {(time.monotonic() - starttime)} seconds" + ) + return + + time.sleep(0.1) + + raise TimeoutError(f"File {path} not found after {maxwaits} seconds") + + def waitforfileremoval(self, path: str, maxwaits: float = 30): + """ + Waits for file to not exist at 'path' for max 'maxwaits' seconds + """ + starttime = time.monotonic() + + while time.monotonic() - starttime < maxwaits: + if not self.fileexists(path): + return + + time.sleep(0.1) + + raise TimeoutError(f"File {path} not removed after {maxwaits} seconds") + + def downloadfile_no_retry( + self, path: str, dstPath: Union[pathlib.Path, str], redirect: bool = True + ) -> None: + """ + Download file at 'path' to local 'dstPath' + """ + with self.session.get( + self.url + "/core/downloadfile", + params={ + "filepath": path, + "filename": path.split("/")[-1], + "redirect": 1 if redirect else 0, + }, + stream=True, + ) as resp: + resp.raise_for_status() + with open(dstPath, "wb") as dstF: + for chunk in resp.iter_content(128 * 1024): + dstF.write(chunk) + + def downloadfile( + self, path: str, dstPath: Union[pathlib.Path, str], redirect: bool = True + ) -> None: + """ + Download file at 'path' to local 'dstPath'. Retries. + """ + if self.retries is None: + return self.downloadfile_no_retry(path, dstPath, redirect) + + retries = self.retries + while True: + try: + self.downloadfile_no_retry(path, dstPath, redirect) + return + except: + retries = retries.increment() + time.sleep(retries.get_backoff_time()) + + def downloadfolder(self, path: str, dstPath: Union[pathlib.Path, str]) -> None: + """ + Recursively download files/directories at 'path' + to local path 'dstPath' + """ + files = self.getfilelist(path) + + for file in files.entries: + dstFn = Path(dstPath) / file.name + if file.type == EntryType.dir: + if not dstFn.is_dir(): + dstFn.mkdir() + + self.downloadfolder(path + "/" + file.name, dstFn) + else: + self.downloadfile(path + "/" + file.name, dstFn) + + def deletefile(self, path: str, adminproxyuserid: Optional[str] = None): + """ + Delete file at 'path' + """ + dir = "/".join(path.split("/")[:-1]) + name = path.split("/")[-1] + + resp = self._api_call( + "/core/deletefile", + {"path": dir, "name": name, "adminproxyuserid": adminproxyuserid}, + ) + self._raise_exception_from_command(resp) + + def upload_bytes( + self, + data: bytes, + serverpath: str, + datemodified: datetime.datetime = datetime.datetime.now(), + ) -> None: + """ + Upload bytes 'data' to server at 'serverpath'. + """ + self.upload(BufferedReader(BytesIO(data)), serverpath, datemodified) # type: ignore + + def upload_str( + self, + data: str, + serverpath: str, + datemodified: datetime.datetime = datetime.datetime.now(), + ) -> None: + """ + Upload str 'data' UTF-8 encoded to server at 'serverpath'. + """ + self.upload_bytes(data.encode("utf-8"), serverpath, datemodified) + + def upload_file( + self, + localpath: pathlib.Path, + serverpath: str, + datemodified: datetime.datetime = datetime.datetime.now(), + adminproxyuserid: Optional[str] = None, + ) -> None: + """ + Upload file at 'localpath' to server at 'serverpath'. 
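# A small upload/download round trip with the helpers above, assuming
# `server` is a logged-in FCServer and the paths are placeholders:
#
#   from pathlib import Path
#
#   data = "hello filecloud"
#   server.upload_str(data, "/alice/Documents/hello.txt")
#   server.waitforfile("/alice/Documents/hello.txt", maxwaits=30,
#                      fsize=len(data.encode("utf-8")))
#   server.downloadfile("/alice/Documents/hello.txt", Path("hello.txt"))
#   server.deletefile("/alice/Documents/hello.txt")
#   server.waitforfileremoval("/alice/Documents/hello.txt")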
+ """ + with open(localpath, "rb") as uploadf: + self.upload( + uploadf, + serverpath, + datemodified, + adminproxyuserid=adminproxyuserid, + ) + + def _serverdatetime(self, dt: datetime.datetime): + return "%04d-%02d-%02d %02d:%02d:%02d" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + ) + + def upload( + self, + uploadf: BufferedReader, + serverpath: str, + datemodified: datetime.datetime, + adminproxyuserid: Optional[str] = None, + ) -> None: + """ + Upload seekable stream at uploadf to server at 'serverpath' + """ + dir = "/".join(serverpath.split("/")[:-1]) + name = serverpath.split("/")[-1] + + assert uploadf.seekable(), "Upload stream must be seekable" + + data_marker = b"DATA_MARKER" + + class FileSlice(BufferedReader): + def __init__(self, stream: BufferedReader, start: int, size: int, envelope: bytes): + super().__init__(stream) # type: ignore + self.start = start + self.end = start + size + self.pos = start + self.envelope_prefix = envelope[: envelope.index(data_marker)] + self.envelope_read = 0 + self.envelope_suffix = envelope[envelope.index(data_marker) + len(data_marker) :] + super().seek(start) + + def read(self, size=-1): + # Read the envelope first + if self.pos == self.end and self.envelope_read < len(self.envelope_suffix) + len( + self.envelope_prefix + ): + if size < 0 or size > len(self.envelope_suffix): + size = len(self.envelope_suffix) + data = self.envelope_suffix[ + self.envelope_read + - len(self.envelope_prefix) : self.envelope_read + - len(self.envelope_prefix) + + size + ] + self.envelope_read += len(data) + return data + if self.pos >= self.end: + return b"" + # Read then end of the envelope + if self.pos == self.start and self.envelope_read < len(self.envelope_prefix): + if size < 0 or size > len(self.envelope_prefix): + size = len(self.envelope_prefix) + data = self.envelope_prefix[self.envelope_read : self.envelope_read + size] + self.envelope_read += len(data) + return data + # Read the file + max_read = self.end - self.pos + if size < 0: + size = min(256 * 1024, max_read) + else: + size = min(size, max_read) + data = super().read(size) + self.pos += len(data) + return data + + def __len__(self) -> int: + return self.end - self.start + len(self.envelope_prefix) + len(self.envelope_suffix) + + def __iter__(self): + return self + + def tell(self) -> int: + if self.pos == self.start: + return 0 + if self.pos == self.end: + return len(self) + return self.pos - self.start + self.envelope_read + + def seek(self, offset: int, whence: int = 0) -> int: + if whence == SEEK_SET: # from the start + self.pos = self.start + offset + elif whence == SEEK_CUR: # from the current position + self.pos += offset + elif whence == SEEK_END: # from the end + self.pos = self.end + offset + else: + raise ValueError(f"Invalid value for whence: {whence}") + + if self.pos < self.start: + self.pos = self.start + elif self.pos > self.end: + self.pos = self.end + + if self.pos == self.start: + self.envelope_read = 0 + + super().seek(self.pos) + + return self.pos + + def close(self): + pass + + slice_size = 20 * 1024 * 1024 # 20 MiB + pos = 0 + + uploadf.seek(0, 2) + data_size = uploadf.tell() + + if data_size == 0: + # Special case for empty files + params = { + "appname": "explorer", + "path": dir, + "offset": 0, + "complete": 1, + "filename": name, + "filesize": 0, + "date": self._serverdatetime(datemodified), + "adminproxyuserid": adminproxyuserid, + } + params_str = urlencode(params) + + if params_str.find("%2FSHARED%2F%21"): + params_str = 
params_str.replace( + "%%2FSHARED%2F%21", "%2FSHARED%2F!" + ) # WEBUI DOES NOT ENCODE THE ! + + resp = self.session.post( + self.url + "/core/upload?" + params_str, files={"file_contents": (name, b"")} + ) + + resp.raise_for_status() + + if resp.text != "OK": + log.warning(f"Upload error. Response: {resp.text}") + raise ServerError("", resp.text) + + return + + rf = RequestField(name="file_contents", data=data_marker, filename=name) + rf.make_multipart() + + envelope, content_type = encode_multipart_formdata([rf]) + + headers = {"Content-type": content_type} + + while pos < data_size or (data_size == 0 and pos == 0): + + curr_slice_size = min(slice_size, data_size - pos) + complete = 0 if pos + curr_slice_size < data_size else 1 + + params = { + "appname": "explorer", + "path": dir, + "offset": pos, + "complete": complete, + "filename": name, + "date": self._serverdatetime(datemodified), + "adminproxyuserid": adminproxyuserid, + } + + if data_size is not None: + params["filesize"] = data_size + + params_str = urlencode(params) + + if params_str.find("%2FSHARED%2F%21"): + params_str = params_str.replace( + "%%2FSHARED%2F%21", "%2FSHARED%2F!" + ) # WEBUI DOES NOT ENCODE THE ! + + resp = self.session.post( + self.url + "/core/upload?" + params_str, + data=FileSlice(uploadf, pos, curr_slice_size, envelope), + headers=headers, + stream=True, + ) + + resp.raise_for_status() + + if resp.text != "OK": + log.warning(f"Upload error. Response: {resp.text}") + raise ServerError("", resp.text) + + pos += curr_slice_size + + def share(self, path: str, adminproxyuserid: str = "") -> FCShare: + """ + Share 'path' + """ + resp = self._api_call( + "/core/addshare", + { + "sharelocation": path, + "adminproxyuserid": adminproxyuserid, + "sharename": path.split("/")[:-1], + }, + ) + + shareid = resp.findtext("./share/shareid", "") + + if not shareid: + msg = resp.findtext("./meta/message", "") + if msg: + raise ServerError("", msg) + else: + raise ServerError("", "No shareid in response") + + return FCShare( + shareid, + resp.findtext("./share/sharename", ""), + resp.findtext("./share/sharelocation", ""), + str_to_bool(resp.findtext("./share/allowpublicaccess", "")), + str_to_bool(resp.findtext("./share/allowpublicupload", "")), + str_to_bool(resp.findtext("./share/allowpublicviewonly", "")), + str_to_bool(resp.findtext("./share/allowpublicuploadonly", "")), + ) + + def getshareforpath(self, path: str, adminproxyuserid: str = "") -> FCShare: + """ + Share 'path' + """ + resp = self._api_call( + "/core/getshareforpath", + {"path": path, "adminproxyuserid": adminproxyuserid}, + ) + + return FCShare( + resp.findtext("./share/shareid", ""), + resp.findtext("./share/sharename", ""), + resp.findtext("./share/sharelocation", ""), + str_to_bool(resp.findtext("./share/allowpublicaccess", "")), + str_to_bool(resp.findtext("./share/allowpublicupload", "")), + str_to_bool(resp.findtext("./share/allowpublicviewonly", "")), + str_to_bool(resp.findtext("./share/allowpublicuploadonly", "")), + ) + + def setallowpublicaccess( + self, + share: FCShare, + allowpublicaccess: bool, + allowpublicviewonly: bool = False, + allowpublicuploadonly: bool = False, + allowpublicupload: bool = False, + sharepassword: str = "", + adminproxyuserid: Optional[str] = None, + ) -> None: + """ + Set access permissions to share + """ + resp = self._api_call( + "/core/setallowpublicaccess", + { + "shareid": share.shareid, + "allowpublicaccess": 1 if allowpublicaccess else 0, + "allowpublicviewonly": 1 if allowpublicviewonly else 0, + 
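# upload() above streams the file to /core/upload in 20 MiB multipart slices
# (FileSlice wraps each slice in the multipart envelope). A sketch of sharing
# an uploaded folder with the share helpers here, assuming `server` is a
# logged-in FCServer; the path and password are placeholders:
#
#   share = server.share("/alice/Documents/public-drop")
#   server.setallowpublicaccess(
#       share,
#       allowpublicaccess=True,
#       allowpublicupload=True,
#       sharepassword="letmein",
#   )
#   print("share id:", share.shareid)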
"allowpublicuploadonly": 1 if allowpublicuploadonly else 0, + "allowpublicupload": 1 if allowpublicupload else 0, + "sharepassword": sharepassword, + "adminproxyuserid": adminproxyuserid, + }, + ) + self._raise_exception_from_command(resp) + + def adduserstoshare( + self, + share: FCShare, + users: list[str], + sendemail: bool = False, + adminproxyuserid: Optional[str] = None, + ) -> None: + """ + Allow users access to share + """ + resp = self._api_call( + "/core/adduserstoshare", + { + "shareid": share.shareid, + "users": ",".join(users), + "sendemail": 1 if sendemail else 0, + "adminproxyuserid": adminproxyuserid if adminproxyuserid else "", + }, + ) + + self._raise_exception_from_command(resp) + + def createfolder( + self, path: str, subpath: Optional[str] = None, adminproxyuserid: Optional[str] = None + ) -> None: + """ + Create folder at 'path' + """ + dir = "/".join(path.split("/")[:-1]) + name = path.split("/")[-1] + + payload = { + "name": name, + "path": dir, + "adminproxyuserid": adminproxyuserid, + } + if subpath is not None: + payload["subpath"] = subpath + + resp = self._api_call( + "/core/createfolder", + payload, + ) + + self._raise_exception_from_command(resp) + + def renamefile(self, path: str, name: str, newname) -> None: + """ + Rename a file + """ + resp = self._api_call("/core/renamefile", {"path": path, "name": name, "newname": newname}) + self._raise_exception_from_command(resp) + + def get_username(self): + """ + Return the username/profile name specified at init + """ + return self.username + + def setuseraccessforshare( + self, + share: Optional[FCShare], + userid: str, + allowmanage: bool, + allowwrite: bool, + allowdownload: bool, + allowshare: bool, + allowsync: bool, + disallowdelete: bool, + adminproxyuserid: Optional[str] = None, + ) -> None: + """ + Set user permissions for share + """ + resp = self._api_call( + "/core/setuseraccessforshare", + { + "shareid": share.shareid if share else "false", + "userid": userid, + "allowmanage": "true" if allowmanage else "false", + "write": "true" if allowwrite else "false", + "download": "true" if allowdownload else "false", + "share": "true" if allowshare else "false", + "sync": "true" if allowsync else "false", + "disallowdelete": "true" if disallowdelete else "false", + "adminproxyuserid": adminproxyuserid if adminproxyuserid else "", + }, + ) + self._raise_exception_from_command(resp) + + def getusersforshare(self, share: FCShare) -> list[FCShareUser]: + """ + Returns a list of users that are added explicitly to the share + """ + resp = self._api_call( + "/core/getusersforshare", + {"shareid": share.shareid}, + ) + entries: list[FCShareUser] = [] + + for entry in resp.findall("./user"): + user = FCShareUser( + name=entry.findtext("./name", ""), + read=entry.findtext("./read") == "true", + write=entry.findtext("./write") == "true", + sync=entry.findtext("./sync") == "true", + share=entry.findtext("./share") == "true", + download=entry.findtext("./download") == "true", + disallowdelete=entry.findtext("./disallowdelete") == "true", + allowmanage=entry.findtext("./allowmanage") == "true", + ) + entries.append(user) + + return entries + + def wait_for_user_to_have_permission_in_share( + self, + share: FCShare, + user: str = "", + permission: str = "", + permission_flag: bool = False, + max_wait: int = 30, + ) -> None: + """ + Waits for a max_wait period of time unless the user specified has the permission specified = permission_flag + """ + + start_time = time.monotonic() + + while time.monotonic() - start_time < 
max_wait: + users_in_share = self.getusersforshare(share) + + if users_in_share != None and any( + item["name"] == user and item[permission] == permission_flag + for item in users_in_share + ): + return + + time.sleep(0.1) + + raise TimeoutError("User does not have permission in share") + + def lock(self, path: str, readlock: bool = False, relative_expiration: int = 0) -> None: + """ + Lock file at 'path' + str: path to file + readlock: said option + expiration: lock expiry time in seconds + """ + resp = self._api_call( + "/core/lock", + { + "path": path, + "readlock": 1 if readlock else 0, + "relative_expiration": relative_expiration, + }, + ) + self._raise_exception_from_command(resp) + + def unlock(self, path: str) -> None: + """ + Unlock file at 'path' + """ + resp = self._api_call("/core/unlock", {"path": path}) + self._raise_exception_from_command(resp) + + def getfilelockinfo(self, path: str) -> FileLockInfo: + """ + Get information about lock at 'path' + """ + resp = self._api_call("/core/getfilelockinfo", {"path": path}) + + return FileLockInfo( + resp.findtext("./filelockinfo/locked", "0") == "1", + resp.findtext("./filelockinfo/readlock", "0") == "1", + resp.findtext("./filelockinfo/lockedby", ""), + ) + + def waitforlock(self, path: str, maxwaits: float = 30): + """ + Wait for file to get locked at 'path' for max 'maxwaits' seconds + """ + starttime = time.monotonic() + + while time.monotonic() - starttime < maxwaits: + li = self.getfilelockinfo(path) + if li != None and li.locked: + return + + time.sleep(0.1) + + raise TimeoutError(f"File {path} not locked after {maxwaits} seconds") + + def waitforlockrelease(self, path: str, maxwaits: float = 30) -> None: + """ + Wait for file to get unlocked at 'path' for max 'maxwaits' seconds + """ + starttime = time.monotonic() + + while time.monotonic() - starttime < maxwaits: + li = self.getfilelockinfo(path) + if li != None and not li.locked: + return + + time.sleep(0.1) + + raise TimeoutError(f"File {path} not unlocked after {maxwaits} seconds") + + def copyfile(self, src_path: str, dst_path: str) -> None: + """ + Copy file/directory + """ + dir = "/".join(src_path.split("/")[:-1]) + src_name = src_path.split("/")[-1] + dst_dir = "/".join(dst_path.split("/")[:-1]) + dst_name = dst_path.split("/")[-1] + + resp = self._api_call( + "/app/explorer/copyfile", + {"name": src_name, "path": dir, "copyto": dst_dir, "copytoname": dst_name}, + ) + self._raise_exception_from_command(resp) + + def movefile(self, src_path: str, dst_path: str) -> None: + """ + Move file/directory + """ + resp = self._api_call( + "/app/explorer/renameormove", + {"fromname": src_path, "toname": dst_path, "overwrite": 0}, + ) + self._raise_exception_from_command(resp) + + def movefile_retry(self, src_path: str, dst_path: str) -> None: + """ + Move file/directory. Retries. 
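# A sketch of the locking helpers above, assuming `server` is a logged-in
# FCServer and the path is a placeholder:
#
#   path = "/alice/Documents/ledger.xlsx"
#   server.lock(path, readlock=False, relative_expiration=600)
#   try:
#       info = server.getfilelockinfo(path)
#       print("locked:", info.locked)
#       ...  # work with the file while other clients see it as locked
#   finally:
#       server.unlock(path)
#       server.waitforlockrelease(path)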
+ """ + if self.retries is None: + return self.movefile(src_path, dst_path) + + retries = self.retries + while True: + try: + self.movefile(src_path, dst_path) + return + except: + retries = retries.increment() + time.sleep(retries.get_backoff_time()) + + def getrmcclients(self) -> list[RMCClient]: + """ + Returns a list of clients that need approval + """ + resp = self._api_call( + "/core/getrmcclients", {"userid": self.username, "start": 0, "end": 1000000} + ) + + entries: list[RMCClient] = [] + + for entry in resp.findall("./rmc_client"): + + ne = RMCClient( + rid=entry.findtext("./rid", ""), + remote_client_id=entry.findtext("./remote_client_id", ""), + remote_client_disp_name=entry.findtext("./remote_client_disp_name", ""), + remote_client_last_login=entry.findtext("./remote_client_last_login", ""), + remote_client_status=int(entry.findtext("./remote_client_status", "-1")), + remote_client_status_message=entry.findtext("./remote_client_status_message", ""), + ) + entries.append(ne) + + return entries + + def approvedeviceaccess(self, remote_client_id: str) -> str: + """ + Approve device and return device authentication code + """ + resp = self._api_call( + "/core/approvedeviceaccess", + {"remote_client_id": remote_client_id}, + ) + + self._raise_exception_from_command(resp) + + return resp.findtext("./command/message", "") + + def getteamfolderinfo(self) -> TeamFolderInfo: + """ + Returns Team Folder information + """ + resp = self._api_call("/admin/getteamfolderproperties", {}) + + tf_info = TeamFolderInfo( + resp.findtext("./teamfolderproperty/enabled", "0") == "1", + resp.findtext("./teamfolderproperty/username", ""), + resp.findtext("./teamfolderproperty/aclenabled", "0") == "1", + ) + + tf_list = self.getfilelist(f"/{tf_info.teamfolderaccount}", adminproxyuserid=self.username) + tf_info.teamfolderpath = tf_list.entries[0].path # type:ignore + + return tf_info + + def getnetworkfolderinfo(self) -> NetworkFolderInfo: + """ + Returns a network folder information + """ + resp = self.getfilelist("/EXTERNAL") + return NetworkFolderInfo( + resp.entries[0].path, # type: ignore + ) + + def getsyncfolderlist(self, paths=list[str]()) -> list[SyncFolder]: + """ + Returns list of syncable folders and their current update version + """ + params = {"v": "1", "skipsyncwithevents": "1", "count": len(paths)} + idx = 1 + for path in paths: + params[f"path{idx}"] = path + idx += 1 + + resp = self._api_call("/app/sync/getsyncfolderlist", params) + + sync_folders: list[SyncFolder] = [] + + for entry in resp.findall("./syncfolder"): + + status_set = entry.findtext("./statusset") + + assert status_set is not None + + update_version = int(status_set.split(";")[0].split(",")[1]) + + ne = SyncFolder(entry.findtext("./name", ""), update_version) + sync_folders.append(ne) + + return sync_folders + + def getsyncfolder(self, path: str) -> SyncFolder: + """ + Returns current update version and path for path + """ + paths = list[str]() + if path.startswith("/EXTERNAL/"): + ns = path.index("/", 10) + paths.append(path[:ns]) + + syncfolderlist = self.getsyncfolderlist(paths) + + for syncfolder in syncfolderlist: + + if path.startswith(syncfolder.path): + return syncfolder + + raise ValueError("Path not found in sync folder list") + + def getsyncdelta( + self, sync_folder: SyncFolder, with_permissions: bool = True + ) -> list[SyncDeltaItem]: + """ + Returns sync delta items for path since updateversion + """ + resp = self._api_call( + "/app/sync/getsyncdelta", + { + "friendly": "Python fcserver", + "name": 
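# A sketch of polling sync changes with the sync helpers here, assuming
# `server` is a logged-in FCServer and the folder path is a placeholder:
#
#   folder = server.getsyncfolder("/alice/Documents")
#   for item in server.getsyncdelta(folder):
#       state = "deleted" if item.isdeleted else "changed"
#       print(state, item.fullpath, item.size)
#       folder.update_version = max(folder.update_version, item.updateversion)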
sync_folder.path, + "path": sync_folder.path, + "status": f"server,{sync_folder.update_version};", + "permissions": "1" if with_permissions else "0", + }, + ) + + sync_delta: list[SyncDeltaItem] = [] + + for entry in resp.findall("./record"): + + entry_type = int(entry.findtext("./type", "1")) + + ne = SyncDeltaItem( + EntryType.dir if entry_type == 0 else EntryType.file, + int(entry.findtext("./size", "0")), + entry.findtext("./modified", ""), + entry.findtext("./name", ""), + entry.findtext("./fullpath", ""), + entry.findtext("./flags", ""), + int(entry.findtext("./isdeleted", "0")) == 1, + int(entry.findtext("./updateversion", 0)), + int(entry.findtext("./candownload", "0")) == 1, + int(entry.findtext("./canupload", "0")) == 1, + int(entry.findtext("./canrename", "0")) == 1, + ) + sync_delta.append(ne) + + return sync_delta + + def waitforsyncdeltaitem( + self, + syncfolder: SyncFolder, + find_path: str, + find_size: int, + find_isdeleted: bool, + maxwaits: int = 30, + ) -> None: + """ + Wait for sync delta item at path + """ + + starttime = time.monotonic() + + while time.monotonic() - starttime < maxwaits: + + items = self.getsyncdelta(syncfolder, with_permissions=False) + + if items is None: + time.sleep(1) + continue + + for item in items: + + if ( + item.fullpath == find_path + and item.size == find_size + and item.isdeleted == find_isdeleted + ): + return + + syncfolder.update_version = max(item.updateversion, syncfolder.update_version) + + time.sleep(0.1) + + raise TimeoutError(f"Sync delta item not found after {maxwaits} seconds") + + def admin_getrmcclients( + self, username: str = "", end_int: int = 10000, sortdir: str = "-1" + ) -> list[RMCClient]: + """ + Returns a list of clients from admin + """ + resp = self._api_call( + "/admin/getrmcclients", + { + "userid": username if username != "" else self.username, + "start": 0, + "end": end_int, + "sortfield": "remote_client_last_login", + "sortdir": sortdir, + }, + ) + + entries: list[RMCClient] = [] + + for entry in resp.findall("./rmc_client"): + client = RMCClient( + rid=entry.findtext("./rid", ""), + remote_client_id=entry.findtext("./remote_client_id", ""), + remote_client_disp_name=entry.findtext("./remote_client_disp_name", ""), + remote_client_last_login=entry.findtext("./remote_client_last_login", ""), + remote_client_status=int(entry.findtext("./remote_client_status", "-1")), + remote_client_status_message=str( + entry.findtext("./remote_client_status_message", "") + ), + ) + entries.append(client) + + return entries + + def admin_adduser( + self, + username: str = "", + password: str = "", + email: str = "", + display_name: str = "", + authtype: str = "0", # full user + status: str = "1", # active + istfuser: str = "0", # not teamfolder user + sendpw: str = "0", + sendemail: str = "0", + ) -> None: + """ + Returns a newly server-created user credenial + """ + resp = self._api_call( + "/admin/adduser", + { + "op": "adduser", + "username": username, + "displayname": username if display_name == "" else display_name, + "email": email, + "password": password, + "authtype": authtype, + "status": status, + "isteamfolderuser": istfuser, + "sendpwdasplaintext": sendpw, + "sendapprovalemail": sendemail, + }, + ) + + self._raise_exception_from_command(resp) + + def admin_deleteuser(self, profile: str) -> None: + """ + Deletes the user from filecloud + on server 23.241 can use also /admin/deleteuser + """ + resp = self._api_call( + "/admin", + { + "op": "deleteuser", + "profile": profile, + }, + ) + 
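# A sketch of the admin user calls above; `admin` is assumed to be an
# FCServer created with adminlogin=True, and the account details are
# placeholders:
#
#   from filecloudapi.fcserver import FCServer
#
#   admin = FCServer("https://filecloud.example.com", None,
#                    "admin", "adminpass", adminlogin=True)
#   admin.admin_adduser(username="bob", password="ChangeMe!1",
#                       email="bob@example.com")
#   ...
#   admin.admin_deleteuser("bob")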
self._raise_exception_from_command(resp) + + def admin_addrmccommand( + self, remote_client_id: str, remote_command_id: str, message: str + ) -> None: + """ + Add a message to the client based on command ID like: + 10000 - display message + 10001 - remote wipe and block login + 10002 - block login + """ + resp = self._api_call( + "/admin/addrmccommand", + { + "remote_client_id": remote_client_id, + "remote_command_id": remote_command_id, + "message": message, + }, + ) + + self._raise_exception_from_command(resp) + + def admin_removermcclient(self, remote_client_id: str): + """ + Removes the commands for the device + """ + resp = self._api_call( + "/admin/removermcclient", + { + "remote_client_id": remote_client_id, + }, + ) + + self._raise_exception_from_command(resp) + + def admin_getusersforpolicy(self, policy_id: str = "") -> Optional[list[PolicyUser]]: + """ + Returns a list of users assigned to policy + """ + resp = self._api_call( + "/admin/getusersforpolicy", + {"policyid": policy_id}, + ) + entries: list[PolicyUser] = [] + + for entry in resp.findall("./user"): + user = PolicyUser( + username=entry.findtext("./username", ""), + status=int(entry.findtext("./status", "1")), + adminstatus=int(entry.findtext("./adminstatus", "0")), + authtype=int(entry.findtext("./authtype", "0")), + ) + entries.append(user) + + return entries + + def admin_assignpolicytouser(self, username: str, policyid: str) -> None: + """ + Assign policy to a user + """ + resp = self._api_call( + "/admin/assignpolicytouser", + { + "username": username, + "policyid": policyid, + }, + ) + self._raise_exception_from_command(resp) + + def admin_resetpolicyforuser(self, username: str) -> None: + """ + Resets policy for user + """ + resp = self._api_call( + "/admin/resetpolicyforuser", + { + "username": username, + }, + ) + + self._raise_exception_from_command(resp) + + def set_config_setting(self, config_name: str, config_val: str) -> None: + """ + Sets a server config setting via admin + """ + resp = self._api_call( + "/admin/setconfigsetting", + {"count": 1, "param0": config_name, "value0": config_val}, + ) + + self._raise_exception_from_command(resp) + + def admin_addnewuser( + self, username: str, email: str, password: str, authtype: str = "0", status: int = 1 + ) -> None: + """ + Creates a new user to the server + # Todo: refactor with user factory ticket changes + """ + resp = self._api_call( + "/admin/adduser", + { + "username": username, + "displayname": username, + "email": email, + "password": password, + "authtype ": authtype, # 0 for default auth and 1 for AD + "status": status, # full(1), guest(0), external(3) or disabled user(2) + }, + ) + + self._raise_exception_from_command(resp) + + def admin_logout(self) -> None: + """ + Perform admin logout + """ + resp = self._api_call( + "/admin/logout", + { + "op": "logout", + }, + ) + self._raise_exception_from_command(resp) + + def add_policy(self, policy_name: str, is_default=False) -> None: + """Add a policy""" + payload = {"op": "addpolicy", "policyname": policy_name, "isdefault": is_default} + + resp = self._api_call( + "/admin/addpolicy", + payload, + ) + + self._raise_exception_from_command(resp) + + def update_policy(self, policy_id: str, config: Dict) -> None: + """Update specific policy""" + payload = { + "op": "updatepolicy", + "policyid": policy_id, + } + + payload = payload | config + + resp = self._api_call( + "/admin/updatepolicy", + payload, + ) + + self._raise_exception_from_command(resp) + + def assign_policy_to_user(self, username: str, 
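# A sketch of the policy helpers above, assuming `admin` is an
# admin-logged-in FCServer; the policy name, id and settings dict are
# placeholders (the server-side setting keys are not documented here):
#
#   admin.add_policy("restricted-users")
#   policy_id = "..."        # obtain the id, e.g. via get_all_policies()
#   admin.update_policy(policy_id, {"policyname": "restricted-users"})
#   admin.assign_policy_to_user("bob", policy_id)
#   for user in admin.admin_getusersforpolicy(policy_id):
#       print(user.username, user.status)
#   admin.admin_resetpolicyforuser("bob")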
policy_id: str) -> None: + """Add user to specific policy""" + resp = self._api_call( + "/admin/assignpolicytouser", + {"op": "assignpolicytouser", "policyid": policy_id, "username": username}, + ) + self._raise_exception_from_command(resp) + + def get_all_policies( + self, start: int = 0, limit: int = 1000, policynamefilter: Optional[str] = None + ) -> PolicyList | None: + """List all policies""" + resp = self._api_call( + "/admin/getallpolicies", + { + "op": "getallpolicies", + "start": start, + "limit": limit, + "policynamefilter": policynamefilter, + }, + ) + policies = PolicyList(resp) + return policies + + def admin_rm_policy(self, policy_id: str) -> None: + """Remove specific policy""" + resp = self._api_call( + "/admin/removepolicy", + { + "op": "removepolicy", + "policyid": policy_id, + }, + ) + self._raise_exception_from_command(resp) + + def admin_get_default_policy(self) -> PolicyList: + """Get global policy""" + resp = self._api_call("/admin/getdefaultpolicy", {"op": "getdefaultpolicy"}) + + return PolicyList(resp) + + def get_effective_policy_for_user(self, username: str) -> PolicyList: + """List all policies""" + resp = self._api_call( + "/admin/geteffectivepolicyforuser", + {"op": "geteffectivepolicyforuser", "username": username}, + ) + + self._raise_exception_from_command(resp) + + eff_policy = PolicyList(resp) + return eff_policy + + def get_teamfolder_properties(self) -> TeamFolderInfo: + """Get team folder properties""" + resp = self._api_call( + "/admin/getteamfolderproperties", + {"op": "getteamfolderproperties"}, + ) + + if resp is None: + raise ServerError("0", "Team folder properties cannot be obtained") + + tf_props = TeamFolderInfo( + teamfolderenabled=bool(resp.findtext("./teamfolderproperty/enabled", "0")), + teamfolderaccount=str(resp.findtext("./teamfolderproperty/username", "0")), + aclenabled=bool(resp.findtext("./teamfolderproperty/aclenabled", "0")), + ) + return tf_props + + def get_user_status(self, username: str) -> UserStatus: + """Retrieve the status of a user with retries. + Returns the status number or UserStatus.UNKNOWN if the status could not be retrieved. 
+ """ + max_retries = 3 + retry_delay = 0.1 + retries = 0 + while retries < max_retries: + try: + resp = self._api_call( + "/admin/getuser", + {"op": "getuser", "username": username}, + ) + status = resp.findtext("./user/status") + if status is not None: + try: + return UserStatus(int(status)) + except ValueError: + log.error(f"Invalid status value for user {username}: {status}") + return UserStatus.UNKNOWN + else: + log.error(f"No status found for user {username} in the response.") + return UserStatus.UNKNOWN + except Exception as e: + log.exception( + f"Failed to get user {username} details (Attempt {retries + 1}/{max_retries}): {e}" + ) + retries += 1 + if retries < max_retries: + time.sleep(retry_delay) + return UserStatus.UNKNOWN + + def set_license_xml(self, license_file: Path) -> None: + """Set a license""" + license_fullpath = license_file.resolve() + + self._admin_api_call_setlicense( + "/admin/setlicensexml", + params={"op": "setlicensexml"}, + files={"file": ("license.xml", open(license_fullpath, "rb"), "text/xml")}, + ) + + def add_acl_entry( + self, + path: str, + type: AclEntryType, + value: str, + perm: AclPermissions, + flag="allow", + adminproxyuserid: Optional[str] = None, + ) -> None: + """ + Add an ACL entry to a path + """ + resp = self._api_call( + "/core/addaclentry", + params={ + "path": path, + "type": type.value, + "value": value, + "perm": str(perm), + "flag": flag, + "adminproxyuserid": adminproxyuserid if adminproxyuserid else "", + }, + ) + + self._raise_exception_from_command(resp) + + def deleteaclentry(self, path: str, value: AclPermissions, type="user") -> None: + """ + Add an ACL entry to a path + """ + resp = self._api_call( + "/core/deleteaclentry", + params={"path": path, "type": type, "value": value}, + ) + self._raise_exception_from_command(resp) + + def admin_getgroups( + self, start=0, limit=10, sortfield="", sortdir="1", everyone: Optional[bool] = False + ) -> Union[tuple[Optional[str], Optional[str]], list[tuple[Optional[str], Optional[str]]]]: + """ + List all user groups + If required, obtain the EVERYONE group only + """ + resp = self._api_call( + "/admin/getgroups", + { + "op": "getgroups", + "start": start, + "limit": limit, + "sortfield": sortfield, + "sortdir": sortdir, + }, + ) + + group_list = list[tuple[Optional[str], Optional[str]]]() + + for group_elem in resp: # type:ignore + group_id = None + group_name = None + + if group_elem is not None: + group_id_elem = group_elem.findtext("groupid") + group_name_elem = group_elem.findtext("groupname") + + if group_id_elem == None: + continue + + if group_id_elem is not None: + group_id = group_id_elem # type:ignore + + if group_name_elem is not None: + group_name = group_name_elem # type:ignore + + if everyone and group_name == "EVERYONE": + # Return the group immediately. 
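# A sketch of looking up the built-in EVERYONE group with admin_getgroups()
# above, assuming `admin` is an admin-logged-in FCServer. With everyone=True
# the method returns a single (groupname, groupid) tuple as soon as EVERYONE
# is found; otherwise it returns the full list:
#
#   result = admin.admin_getgroups(limit=100, everyone=True)
#   if isinstance(result, tuple):
#       group_name, group_id = result
#       print("EVERYONE group id:", group_id)
#   else:
#       for group_name, group_id in result:
#           print(group_name, group_id)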
+ return (group_name, group_id) + + group_list.append((group_name, group_id)) + + return group_list + + def admin_createnewgroup(self, groupname: str): + """List all policies""" + resp = self._api_call( + "/admin/addgroup", + {"op": "addgroup", "groupname": groupname}, + ) + + if int(resp.findtext("./group/groupname", "0")) != 1: + raise ServerError("", "Failed to create group") + + def admin_groupisinshare( + self, + share: FCShare, + group_id: str, + adminproxyuserid: str, + ) -> None: + """ + Set access permissions to share + """ + + resp = self._api_call( + "/app/websharepro/getgroupaccessforshare", + { + "shareid": share.shareid, + "adminproxyuserid": adminproxyuserid, + }, + ) + + groupid_text = resp.findtext("./group/groupid") + if not groupid_text or groupid_text != group_id: + raise ServerError("", f"group {group_id} is not in shared folder '{str(share)}'") + + def admin_addgrouptoshare( + self, share: FCShare, groupid: str, adminproxyuserid: str = "" + ) -> None: + """ + Allow group access to share + """ + resp = self._api_call( + "/core/addgrouptoshare", + { + "shareid": share.shareid, + "groupid": groupid, + "adminproxyuserid": adminproxyuserid, + }, + ) + self._raise_exception_from_command(resp) + + def admin_addgrouptoexternal(self, externalid, groupid, writemode="1"): + """ + Allow group access to external + """ + resp = self._api_call( + "/admin/addgrouptoexternal", + {"groupid": groupid, "externalid": externalid, "writemode": writemode}, + ) + + self._raise_exception_from_command(resp) + + def admin_getexternals(self, start=0, end=10, filter="") -> ET.Element: + """ + Get all externals + """ + resp = self._api_call( + "/admin/getexternals", + {"start": start, "end": end, "filter": filter}, + ) + + return resp + + def admin_addmembertogroup(self, groupid: str, username: str) -> None: + """ + Add user to group + """ + resp = self._api_call( + "/admin/addmembertogroup", + { + "groupid": groupid, + "userid": username, + }, + ) + self._raise_exception_from_command(resp) + + def admin_setgroupaccessforshare( + self, + share: Optional[FCShare], + groupid: str, + adminproxyuserid: str = "", + ) -> None: + """ + Set all user permissions for share + """ + resp = self._api_call( + "/app/websharepro/setgroupaccessforshare", + { + "shareid": share.shareid if share else "false", + "groupid": groupid, + "write": "true", + "download": "true", + "share": "true", + "sync": "true", + "disallowdelete": "false", + "adminproxyuserid": adminproxyuserid, + }, + ) + self._raise_exception_from_command(resp) + + def admin_set_teamfolder_user(self, username: str) -> None: + """ + Add user to group + """ + resp = self._api_call( + "/admin/setteamfolderuser", + { + "op": "setteamfolderuser", + "username": username, + }, + ) + self._raise_exception_from_command(resp) + + def admin_addexternal( + self, + externalname: Optional[str], + type: Optional[str] = None, + location: Optional[str] = None, + automount: Optional[str] = None, + automount_type: Optional[str] = None, + automuntparam1: Optional[str] = None, + perm: Optional[str] = None, + bucket: Optional[str] = None, + region: Optional[str] = None, + key: Optional[str] = None, + secret: Optional[str] = None, + endpoint: Optional[str] = None, + toplevelprefix: Optional[str] = None, + enableenc: Optional[str] = None, + enctype: Optional[str] = None, + kmsid: Optional[str] = None, + useiamrole: Optional[str] = None, + container: Optional[str] = None, + accountkey: Optional[str] = None, + accountname: Optional[str] = None, + ) -> ET.Element: + """ + Add 
external to server + """ + resp = self._api_call( + "/admin/addexternal", + { + "op": "addexternal", + "externalname": externalname, + "type": type, + "location": location, + "automount": automount, + "automounttype": automount_type, + "automountparam1": automuntparam1, + "perm": perm, + "bucket": bucket, + "region": region, + "key": key, + "secret": secret, + "toplevelprefix": toplevelprefix, + "enableenc": enableenc, + "enctype": enctype, + "kmsid": kmsid, + "useiamrole": useiamrole, + "endpoint": endpoint, + "container": container, + "accountkey": accountkey, + "accountname": accountname, + }, + ) + + checkstr = " ".join(resp.itertext()).strip() + if "Name already exists" in checkstr: + externals = self.admin_getexternals() + + for ext_item in externals: # type: ignore + if ext_item.get("name") == externalname: + resp = ext_item + + return resp + + def admin_add_dlp_rule( + self, + rule_name: str = "dlp_deny_download", + str_expression: str = "dlp_deny_download", + action: str = "DOWNLOAD", + ) -> None: + """ + Add dlp deny rule for file name containing expression + Action: DOWNLOAD, LOGIN, SHARE + """ + if action == "DOWNLOAD": + expression = f"(_file.fileNameContains('{str_expression}'))" + elif action == "LOGIN": + expression = f"(_user.email == '{str_expression}')" + else: + expression = str_expression + + resp = self._api_call( + "/admin/dlpaddrule", + { + "op": "dlpaddrule", + "rulename": rule_name, + "type": "DENY", + "ispermissive": "0", + "action": action, + "expression": expression, + "ruleNotification": "", + }, + ) + message = resp.findtext("./command/message", "0") + if message not in ["Rule created successfully", "Rule name already taken"]: + raise ServerError("", f"Failed to add rule {rule_name}") + + def admin_remove_dlp_rule(self, rule_name: str) -> None: + """ + Deletes a DLP rule + """ + resp = self._api_call( + "/admin/dlpdroprule", + { + "op": "dlpdroprule", + "rulename": rule_name, + }, + ) + message = resp.findtext("./command/message", "0") + if message not in ["Rule dropped successfully", "Rule name already dropped"]: + raise ServerError("", f"Failed to delete rule {rule_name}") + + def admin_set_config_setting(self, config_setting_name: str, config_setting_value: str): + """ + Set a single config setting + + Args: + config_setting_name (str): TONIDOCLOUD_ string with setting + config_setting_value (str): value of the config key + """ + resp = self._api_call( + "/admin/setconfigsetting", + { + "op": "setconfigsetting", + "count": "1", + "param0": config_setting_name, + "value0": config_setting_value, + }, + ) + self._raise_exception_from_command(resp) + + def admin_clearrmcclients(self, username: str) -> None: + """ + Remove all RMC Clients found in admin portal associated with a user + """ + clients = self.admin_getrmcclients(username) + + for client in clients: + self.admin_removermcclient(client.remote_client_id) + + def admin_waitforrmcclient(self, username: str, rmc_count: int, maxwaits: float = 60) -> None: + """ + Wait for RMC Client count incremente by one in admin portal + """ + starttime = time.monotonic() + + while time.monotonic() - starttime < maxwaits: + rmc_clients = self.admin_getrmcclients(username) + + if rmc_clients is not None and len(rmc_clients) >= (rmc_count + 1): + return + + time.sleep(0.1) + + raise TimeoutError(f"RMC Client count not incremented after {maxwaits} seconds") + + def setuserpassword(self, username: str, new_password: str) -> None: + """ + Set new password for a user + """ + resp = self._api_call( + "/admin/setuserpassword", 
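# A sketch of the DLP helpers above, assuming `admin` is an admin-logged-in
# FCServer; the rule name and expression are placeholders. With
# action="DOWNLOAD" the rule denies downloads of files whose name contains
# the given string:
#
#   admin.admin_add_dlp_rule(rule_name="block-confidential",
#                            str_expression="confidential",
#                            action="DOWNLOAD")
#   ...
#   admin.admin_remove_dlp_rule("block-confidential")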
+ { + "op": "setuserpassword", + "profile": username, + "password": new_password, + "passwordconfirm": new_password, + }, + ) + self._raise_exception_from_command(resp) + + def admin_checkshare(self, share_owner: str, share_filter: str = "", limit: int = 10) -> None: + """ + Get shares for specific user/filter if exists. + Filter can be The share location, share-name or user-name + """ + resp = self._api_call( + "/admin/getsharesbyowner", + { + "op": "getsharesbyowner", + "shareowner": share_owner, + "sharefilter": share_filter, + "limit": limit, + }, + ) + meta = resp.find("./meta") + if meta is None: + raise ValueError("No shares meta found") + + total_text = meta.findtext("./total") + if total_text is None: + raise ValueError("No shares total found") + total = int(total_text) + if total != 1: + raise ValueError(f"Expected 1 share, found {total}") + + def get_share_password(self, share: FCShare) -> str: + """ + Get share password for a public share. + """ + resp = self._api_call( + "/core/getsharepassword", + { + "op": "getsharepassword", + "shareid": share.shareid, + }, + ) + self._raise_exception_from_command(resp) + return resp.findtext("./command/message", "") + + def get_permissions_for_group(self, share: FCShare, groupid: str) -> FCShareGroup: + """ + Returns the permissions for a specific group in the share. + """ + resp = self._api_call( + "/core/getgroupaccessforshare", + {"shareid": share.shareid}, + ) + + for entry in resp.findall("./group"): + if entry.findtext("./groupid") == groupid: + group = FCShareGroup( + groupid=entry.findtext("./groupid", ""), + groupname=entry.findtext("./groupname", ""), + read=entry.findtext("./read") == "true", + write=entry.findtext("./write") == "true", + sync=entry.findtext("./sync") == "true", + share=entry.findtext("./share") == "true", + download=entry.findtext("./download") == "true", + disallowdelete=entry.findtext("./disallowdelete") == "true", + ) + return group + + raise ValueError(f"Group {groupid} not found in share {share.shareid}") + + def getuploadform(self, shareid: str) -> str: + """ + Get an upload form for a upload-only share - HTML string + """ + return self._api_call_raw("/core/getuploadform", {"shareid": shareid}) + + def get_share_activities(self, share: FCShare) -> list[ShareActivity]: + """ + Returns activities for a share. + """ + resp = self._api_call( + "/core/getshareactivityforshare", + {"shareid": share.shareid}, + ) + + entries: list[ShareActivity] = [] + + for entry in resp.findall("./shareactivities"): + share_id = entry.findtext("./shareid") + if share_id == share.shareid: + shares_acts = ShareActivity( + shareid=share_id or "", + path=entry.findtext("./path") or "", + name=entry.findtext("./name") or "", + actioncode=int(entry.findtext("./actioncode") or 0), + who=entry.findtext("./who") or "", + when=entry.findtext("./when") or "", + how=entry.findtext("./how") or "", + ip=entry.findtext("./ip") or "", + ) + entries.append(shares_acts) + + return entries diff --git a/filecloudapi/tests/__init__.py b/filecloudapi/tests/__init__.py new file mode 100644 index 0000000..219229f --- /dev/null +++ b/filecloudapi/tests/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +"""Unit tests for the package.""" diff --git a/filecloudapi/tests/conftest.py b/filecloudapi/tests/conftest.py new file mode 100644 index 0000000..ec16ed5 --- /dev/null +++ b/filecloudapi/tests/conftest.py @@ -0,0 +1,12 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. 
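# A sketch of reading the activity log of a share with get_share_activities()
# above, assuming `server` is a logged-in FCServer and `share` was returned
# by share() or getshareforpath():
#
#   for activity in server.get_share_activities(share):
#       print(activity.when, activity.who, activity.actioncode, activity.path)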
+"""Unit tests configuration file.""" + +import log + + +def pytest_configure(config): + """Disable verbose output when running tests.""" + log.init(debug=True) + + terminal = config.pluginmanager.getplugin('terminal') + terminal.TerminalReporter.showfspath = False diff --git a/filecloudapi/tests/test_fcserver.py b/filecloudapi/tests/test_fcserver.py new file mode 100644 index 0000000..1f10568 --- /dev/null +++ b/filecloudapi/tests/test_fcserver.py @@ -0,0 +1,4 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. + +def test_noop(): + pass \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..514b1b7 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,19 @@ +site_name: filecloudapi-python +site_description: A Python library to connect to a Filecloud server +site_author: FileCloud + +repo_url: https://github.com/codelathe/filecloudapi-python +edit_uri: https://github.com/codelathe/filecloudapi-python/edit/main/docs + +theme: readthedocs + +markdown_extensions: + - codehilite + +nav: + - Home: index.md + - Advanced: advanced.md + - About: + - Release Notes: about/changelog.md + - Contributor Guide: about/contributing.md + - License: about/license.md diff --git a/notebooks/profile_default/.gitignore b/notebooks/profile_default/.gitignore new file mode 100644 index 0000000..9b1dffd --- /dev/null +++ b/notebooks/profile_default/.gitignore @@ -0,0 +1 @@ +*.sqlite diff --git a/notebooks/profile_default/ipython_config.py b/notebooks/profile_default/ipython_config.py new file mode 100644 index 0000000..ab5c413 --- /dev/null +++ b/notebooks/profile_default/ipython_config.py @@ -0,0 +1,618 @@ +# Configuration file for ipython. + +# ------------------------------------------------------------------------------ +# InteractiveShellApp(Configurable) configuration +# ------------------------------------------------------------------------------ + +## A Mixin for applications that start InteractiveShell instances. +# +# Provides configurables for loading extensions and executing files as part of +# configuring a Shell environment. +# +# The following methods should be called by the :meth:`initialize` method of the +# subclass: +# +# - :meth:`init_path` +# - :meth:`init_shell` (to be implemented by the subclass) +# - :meth:`init_gui_pylab` +# - :meth:`init_extensions` +# - :meth:`init_code` + +## Execute the given command string. +# c.InteractiveShellApp.code_to_run = '' + +## Run the file referenced by the PYTHONSTARTUP environment variable at IPython +# startup. +# c.InteractiveShellApp.exec_PYTHONSTARTUP = True + +## List of files to run at IPython startup. +# c.InteractiveShellApp.exec_files = [] + +## lines of code to run at IPython startup. +c.InteractiveShellApp.exec_lines = ['%autoreload 2'] + +## A list of dotted module names of IPython extensions to load. +c.InteractiveShellApp.extensions = ['autoreload'] + +## dotted module name of an IPython extension to load. +# c.InteractiveShellApp.extra_extension = '' + +## A file to be run +# c.InteractiveShellApp.file_to_run = '' + +## Enable GUI event loop integration with any of ('asyncio', 'glut', 'gtk', +# 'gtk2', 'gtk3', 'osx', 'pyglet', 'qt', 'qt4', 'qt5', 'tk', 'wx', 'gtk2', +# 'qt4'). +# c.InteractiveShellApp.gui = None + +## Should variables loaded at startup (by startup files, exec_lines, etc.) be +# hidden from tools like %who? +# c.InteractiveShellApp.hide_initial_ns = True + +## Configure matplotlib for interactive use with the default matplotlib backend. 
+# c.InteractiveShellApp.matplotlib = None + +## Run the module as a script. +# c.InteractiveShellApp.module_to_run = '' + +## Pre-load matplotlib and numpy for interactive use, selecting a particular +# matplotlib backend and loop integration. +# c.InteractiveShellApp.pylab = None + +## If true, IPython will populate the user namespace with numpy, pylab, etc. and +# an ``import *`` is done from numpy and pylab, when using pylab mode. +# +# When False, pylab mode should not import any names into the user namespace. +# c.InteractiveShellApp.pylab_import_all = True + +## Reraise exceptions encountered loading IPython extensions? +# c.InteractiveShellApp.reraise_ipython_extension_failures = False + +# ------------------------------------------------------------------------------ +# Application(SingletonConfigurable) configuration +# ------------------------------------------------------------------------------ + +## This is an application. + +## The date format used by logging formatters for %(asctime)s +# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# c.Application.log_level = 30 + +# ------------------------------------------------------------------------------ +# BaseIPythonApplication(Application) configuration +# ------------------------------------------------------------------------------ + +## IPython: an enhanced interactive Python shell. + +## Whether to create profile dir if it doesn't exist +# c.BaseIPythonApplication.auto_create = False + +## Whether to install the default config files into the profile dir. If a new +# profile is being created, and IPython contains config files for that profile, +# then they will be staged into the new directory. Otherwise, default config +# files will be automatically generated. +# c.BaseIPythonApplication.copy_config_files = False + +## Path to an extra config file to load. +# +# If specified, load this config file in addition to any other IPython config. +# c.BaseIPythonApplication.extra_config_file = '' + +## The name of the IPython directory. This directory is used for logging +# configuration (through profiles), history storage, etc. The default is usually +# $HOME/.ipython. This option can also be specified through the environment +# variable IPYTHONDIR. +# c.BaseIPythonApplication.ipython_dir = '' + +## Whether to overwrite existing config files when copying +# c.BaseIPythonApplication.overwrite = False + +## The IPython profile to use. +# c.BaseIPythonApplication.profile = 'default' + +## Create a massive crash report when IPython encounters what may be an internal +# error. The default is to append a short message to the usual traceback +# c.BaseIPythonApplication.verbose_crash = False + +# ------------------------------------------------------------------------------ +# TerminalIPythonApp(BaseIPythonApplication,InteractiveShellApp) configuration +# ------------------------------------------------------------------------------ + +## Whether to display a banner upon starting IPython. +# c.TerminalIPythonApp.display_banner = True + +## If a command or file is given via the command-line, e.g. 'ipython foo.py', +# start an interactive shell after executing the file or command. +# c.TerminalIPythonApp.force_interact = False + +## Class to use to instantiate the TerminalInteractiveShell object. 
Useful for +# custom Frontends +# c.TerminalIPythonApp.interactive_shell_class = 'IPython.terminal.interactiveshell.TerminalInteractiveShell' + +## Start IPython quickly by skipping the loading of config files. +# c.TerminalIPythonApp.quick = False + +# ------------------------------------------------------------------------------ +# InteractiveShell(SingletonConfigurable) configuration +# ------------------------------------------------------------------------------ + +## An enhanced, interactive shell for Python. + +## 'all', 'last', 'last_expr' or 'none', 'last_expr_or_assign' specifying which +# nodes should be run interactively (displaying output from expressions). +# c.InteractiveShell.ast_node_interactivity = 'last_expr' + +## A list of ast.NodeTransformer subclass instances, which will be applied to +# user input before code is run. +# c.InteractiveShell.ast_transformers = [] + +## Automatically run await statement in the top level repl. +# c.InteractiveShell.autoawait = True + +## Make IPython automatically call any callable object even if you didn't type +# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically. +# The value can be '0' to disable the feature, '1' for 'smart' autocall, where +# it is not applied if there are no more arguments on the line, and '2' for +# 'full' autocall, where all callable objects are automatically called (even if +# no arguments are present). +# c.InteractiveShell.autocall = 0 + +## Autoindent IPython code entered interactively. +# c.InteractiveShell.autoindent = True + +## Enable magic commands to be called without the leading %. +# c.InteractiveShell.automagic = True + +## The part of the banner to be printed before the profile +# c.InteractiveShell.banner1 = "Python 3.8.1 (default, Jan 9 2020, 14:37:22) \nType 'copyright', 'credits' or 'license' for more information\nIPython 7.12.0 -- An enhanced Interactive Python. Type '?' for help.\n" + +## The part of the banner to be printed after the profile +# c.InteractiveShell.banner2 = '' + +## Set the size of the output cache. The default is 1000, you can change it +# permanently in your config file. Setting it to 0 completely disables the +# caching system, and the minimum value accepted is 3 (if you provide a value +# less than 3, it is reset to 0 and a warning is issued). This limit is defined +# because otherwise you'll spend more time re-flushing a too small cache than +# working +# c.InteractiveShell.cache_size = 1000 + +## Use colors for displaying information about objects. Because this information +# is passed through a pager (like 'less'), and some pagers get confused with +# color codes, this capability can be turned off. +# c.InteractiveShell.color_info = True + +## Set the color scheme (NoColor, Neutral, Linux, or LightBG). +# c.InteractiveShell.colors = 'Neutral' + +## +# c.InteractiveShell.debug = False + +## Don't call post-execute functions that have failed in the past. +# c.InteractiveShell.disable_failing_post_execute = False + +## If True, anything that would be passed to the pager will be displayed as +# regular output instead. +# c.InteractiveShell.display_page = False + +## (Provisional API) enables html representation in mime bundles sent to pagers. +# c.InteractiveShell.enable_html_pager = False + +## Total length of command history +# c.InteractiveShell.history_length = 10000 + +## The number of saved history entries to be loaded into the history buffer at +# startup. 
+# c.InteractiveShell.history_load_length = 1000 + +## +# c.InteractiveShell.ipython_dir = '' + +## Start logging to the given file in append mode. Use `logfile` to specify a log +# file to **overwrite** logs to. +# c.InteractiveShell.logappend = '' + +## The name of the logfile to use. +# c.InteractiveShell.logfile = '' + +## Start logging to the default log file in overwrite mode. Use `logappend` to +# specify a log file to **append** logs to. +# c.InteractiveShell.logstart = False + +## Select the loop runner that will be used to execute top-level asynchronous +# code +# c.InteractiveShell.loop_runner = 'IPython.core.interactiveshell._asyncio_runner' + +## +# c.InteractiveShell.object_info_string_level = 0 + +## Automatically call the pdb debugger after every exception. +# c.InteractiveShell.pdb = False + +## Deprecated since IPython 4.0 and ignored since 5.0, set +# TerminalInteractiveShell.prompts object directly. +# c.InteractiveShell.prompt_in1 = 'In [\\#]: ' + +## Deprecated since IPython 4.0 and ignored since 5.0, set +# TerminalInteractiveShell.prompts object directly. +# c.InteractiveShell.prompt_in2 = ' .\\D.: ' + +## Deprecated since IPython 4.0 and ignored since 5.0, set +# TerminalInteractiveShell.prompts object directly. +# c.InteractiveShell.prompt_out = 'Out[\\#]: ' + +## Deprecated since IPython 4.0 and ignored since 5.0, set +# TerminalInteractiveShell.prompts object directly. +# c.InteractiveShell.prompts_pad_left = True + +## +# c.InteractiveShell.quiet = False + +## +# c.InteractiveShell.separate_in = '\n' + +## +# c.InteractiveShell.separate_out = '' + +## +# c.InteractiveShell.separate_out2 = '' + +## Show rewritten input, e.g. for autocall. +# c.InteractiveShell.show_rewritten_input = True + +## Enables rich html representation of docstrings. (This requires the docrepr +# module). +# c.InteractiveShell.sphinxify_docstring = False + +## +# c.InteractiveShell.wildcards_case_sensitive = True + +## Switch modes for the IPython exception handlers. +# c.InteractiveShell.xmode = 'Context' + +# ------------------------------------------------------------------------------ +# TerminalInteractiveShell(InteractiveShell) configuration +# ------------------------------------------------------------------------------ + +## Autoformatter to reformat Terminal code. Can be `'black'` or `None` +# c.TerminalInteractiveShell.autoformatter = None + +## Set to confirm when you try to exit IPython with an EOF (Control-D in Unix, +# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a +# direct exit without any confirmation. +# c.TerminalInteractiveShell.confirm_exit = True + +## Options for displaying tab completions, 'column', 'multicolumn', and +# 'readlinelike'. These options are for `prompt_toolkit`, see `prompt_toolkit` +# documentation for more information. +# c.TerminalInteractiveShell.display_completions = 'multicolumn' + +## Shortcut style to use at the prompt. 'vi' or 'emacs'. +# c.TerminalInteractiveShell.editing_mode = 'emacs' + +## Set the editor used by IPython (default to $EDITOR/vi/notepad). +# c.TerminalInteractiveShell.editor = 'vim' + +## Allows to enable/disable the prompt toolkit history search +# c.TerminalInteractiveShell.enable_history_search = True + +## Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. This is +# in addition to the F2 binding, which is always enabled. +# c.TerminalInteractiveShell.extra_open_editor_shortcuts = False + +## Provide an alternative handler to be called when the user presses Return. 
This +# is an advanced option intended for debugging, which may be changed or removed +# in later releases. +# c.TerminalInteractiveShell.handle_return = None + +## Highlight matching brackets. +# c.TerminalInteractiveShell.highlight_matching_brackets = True + +## The name or class of a Pygments style to use for syntax highlighting. To see +# available styles, run `pygmentize -L styles`. +# c.TerminalInteractiveShell.highlighting_style = traitlets.Undefined + +## Override highlighting format for specific tokens +# c.TerminalInteractiveShell.highlighting_style_overrides = {} + +## +# c.TerminalInteractiveShell.mime_renderers = {} + +## Enable mouse support in the prompt (Note: prevents selecting text with the +# mouse) +# c.TerminalInteractiveShell.mouse_support = False + +## Display the current vi mode (when using vi editing mode). +# c.TerminalInteractiveShell.prompt_includes_vi_mode = True + +## Class used to generate Prompt token for prompt_toolkit +# c.TerminalInteractiveShell.prompts_class = 'IPython.terminal.prompts.Prompts' + +## Use `raw_input` for the REPL, without completion and prompt colors. +# +# Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. +# Known usages are: IPython's own testing machinery, and emacs inferior-shell +# integration through elpy. +# +# This mode defaults to `True` if the `IPY_TEST_SIMPLE_PROMPT` environment +# variable is set, or the current terminal is not a tty. +# c.TerminalInteractiveShell.simple_prompt = False + +## Number of lines at the bottom of the screen to reserve for the completion menu +# c.TerminalInteractiveShell.space_for_menu = 6 + +## Automatically set the terminal title +# c.TerminalInteractiveShell.term_title = True + +## Customize the terminal title format. This is a python format string. +# Available substitutions are: {cwd}. +# c.TerminalInteractiveShell.term_title_format = 'IPython: {cwd}' + +## Use 24bit colors instead of 256 colors in prompt highlighting. If your +# terminal supports true color, the following command should print 'TRUECOLOR' +# in orange: printf "\x1b[38;2;255;100;0mTRUECOLOR\x1b[0m\n" +# c.TerminalInteractiveShell.true_color = False + +# ------------------------------------------------------------------------------ +# HistoryAccessor(HistoryAccessorBase) configuration +# ------------------------------------------------------------------------------ + +## Access the history database without adding to it. +# +# This is intended for use by standalone history tools. IPython shells use +# HistoryManager, below, which is a subclass of this. + +## Options for configuring the SQLite connection +# +# These options are passed as keyword args to sqlite3.connect when establishing +# database connections. +# c.HistoryAccessor.connection_options = {} + +## enable the SQLite history +# +# set enabled=False to disable the SQLite history, in which case there will be +# no stored history, no SQLite connection, and no background saving thread. +# This may be necessary in some threaded environments where IPython is embedded. +# c.HistoryAccessor.enabled = True + +## Path to file to use for SQLite history database. +# +# By default, IPython will put the history database in the IPython profile +# directory. If you would rather share one history among profiles, you can set +# this value in each, so that they are consistent. +# +# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
+# If you see IPython hanging, try setting this to something on a local disk, +# e.g:: +# +# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite +# +# you can also use the specific value `:memory:` (including the colon at both +# ends but not the back ticks), to avoid creating a history file. +# c.HistoryAccessor.hist_file = '' + +# ------------------------------------------------------------------------------ +# HistoryManager(HistoryAccessor) configuration +# ------------------------------------------------------------------------------ + +## A class to organize all history-related functionality in one place. + +## Write to database every x commands (higher values save disk access & power). +# Values of 1 or less effectively disable caching. +# c.HistoryManager.db_cache_size = 0 + +## Should the history database include output? (default: no) +# c.HistoryManager.db_log_output = False + +# ------------------------------------------------------------------------------ +# ProfileDir(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +## An object to manage the profile directory and its resources. +# +# The profile directory is used by all IPython applications, to manage +# configuration, logging and security. +# +# This object knows how to find, create and manage these directories. This +# should be used by any code that wants to handle profiles. + +## Set the profile location directly. This overrides the logic used by the +# `profile` option. +# c.ProfileDir.location = '' + +# ------------------------------------------------------------------------------ +# BaseFormatter(Configurable) configuration +# ------------------------------------------------------------------------------ + +## A base formatter class that is configurable. +# +# This formatter should usually be used as the base class of all formatters. It +# is a traited :class:`Configurable` class and includes an extensible API for +# users to determine how their objects are formatted. The following logic is +# used to find a function to format a given object. +# +# 1. The object is introspected to see if it has a method with the name +# :attr:`print_method`. If it does, that object is passed to that method +# for formatting. +# 2. If no print method is found, three internal dictionaries are consulted +# to find a print method: :attr:`singleton_printers`, :attr:`type_printers` +# and :attr:`deferred_printers`. +# +# Users should use these dictionaries to register functions that will be used to +# compute the format data for their objects (if those objects don't have the +# special print methods). The easiest way of using these dictionaries is through +# the :meth:`for_type` and :meth:`for_type_by_name` methods. +# +# If no function/callable is found to compute the format data, ``None`` is +# returned and this format type is not used. + +## +# c.BaseFormatter.deferred_printers = {} + +## +# c.BaseFormatter.enabled = True + +## +# c.BaseFormatter.singleton_printers = {} + +## +# c.BaseFormatter.type_printers = {} + +# ------------------------------------------------------------------------------ +# PlainTextFormatter(BaseFormatter) configuration +# ------------------------------------------------------------------------------ + +## The default pretty-printer. +# +# This uses :mod:`IPython.lib.pretty` to compute the format data of the object. +# If the object cannot be pretty printed, :func:`repr` is used.
See the +# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty +# printers. Here is a simple example:: +# +# def dtype_pprinter(obj, p, cycle): +# if cycle: +# return p.text('dtype(...)') +# if hasattr(obj, 'fields'): +# if obj.fields is None: +# p.text(repr(obj)) +# else: +# p.begin_group(7, 'dtype([') +# for i, field in enumerate(obj.descr): +# if i > 0: +# p.text(',') +# p.breakable() +# p.pretty(field) +# p.end_group(7, '])') + +## +# c.PlainTextFormatter.float_precision = '' + +## Truncate large collections (lists, dicts, tuples, sets) to this size. +# +# Set to 0 to disable truncation. +# c.PlainTextFormatter.max_seq_length = 1000 + +## +# c.PlainTextFormatter.max_width = 79 + +## +# c.PlainTextFormatter.newline = '\n' + +## +# c.PlainTextFormatter.pprint = True + +## +# c.PlainTextFormatter.verbose = False + +# ------------------------------------------------------------------------------ +# Completer(Configurable) configuration +# ------------------------------------------------------------------------------ + +## Enable unicode completions, e.g. \alpha . Includes completion of latex +# commands, unicode names, and expanding unicode characters back to latex +# commands. +# c.Completer.backslash_combining_completions = True + +## Enable debug for the Completer. Mostly print extra information for +# experimental jedi integration. +# c.Completer.debug = False + +## Activate greedy completion PENDING DEPRECATION. This is now mostly taken care +# of with Jedi. +# +# This will enable completion on elements of lists, results of function calls, +# etc., but can be unsafe because the code is actually evaluated on TAB. +# c.Completer.greedy = False + +## Experimental: restrict time (in milliseconds) during which Jedi can compute +# types. Set to 0 to stop computing types. Non-zero value lower than 100ms may +# hurt performance by preventing jedi from building its cache. +# c.Completer.jedi_compute_type_timeout = 400 + +## Experimental: Use Jedi to generate autocompletions. Defaults to True if jedi is +# installed. +# c.Completer.use_jedi = True + +# ------------------------------------------------------------------------------ +# IPCompleter(Completer) configuration +# ------------------------------------------------------------------------------ + +## Extension of the completer class with IPython-specific features + +## DEPRECATED as of version 5.0. +# +# Instruct the completer to use __all__ for the completion +# +# Specifically, when completing on ``object.``. +# +# When True: only those names in obj.__all__ will be included. +# +# When False [default]: the __all__ attribute is ignored +# c.IPCompleter.limit_to__all__ = False + +## Whether to merge completion results into a single list +# +# If False, only the completion results from the first non-empty completer will +# be returned. +# c.IPCompleter.merge_completions = True + +## Instruct the completer to omit private method names +# +# Specifically, when completing on ``object.``. +# +# When 2 [default]: all names that start with '_' will be excluded. +# +# When 1: all 'magic' names (``__foo__``) will be excluded. +# +# When 0: nothing will be excluded.
+# c.IPCompleter.omit__names = 2 + +# ------------------------------------------------------------------------------ +# ScriptMagics(Magics) configuration +# ------------------------------------------------------------------------------ + +## Magics for talking to scripts +# +# This defines a base `%%script` cell magic for running a cell with a program in +# a subprocess, and registers a few top-level magics that call %%script with +# common interpreters. + +## Extra script cell magics to define +# +# This generates simple wrappers of `%%script foo` as `%%foo`. +# +# If you want to add script magics that aren't on your path, specify them in +# script_paths +# c.ScriptMagics.script_magics = [] + +## Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby' +# +# Only necessary for items in script_magics where the default path will not find +# the right interpreter. +# c.ScriptMagics.script_paths = {} + +# ------------------------------------------------------------------------------ +# LoggingMagics(Magics) configuration +# ------------------------------------------------------------------------------ + +## Magics related to all logging machinery. + +## Suppress output of log state when logging is enabled +# c.LoggingMagics.quiet = False + +# ------------------------------------------------------------------------------ +# StoreMagics(Magics) configuration +# ------------------------------------------------------------------------------ + +## Lightweight persistence for python variables. +# +# Provides the %store magic. + +## If True, any %store-d variables will be automatically restored when IPython +# starts. +# c.StoreMagics.autorestore = False diff --git a/notebooks/profile_default/startup/README b/notebooks/profile_default/startup/README new file mode 100644 index 0000000..61d4700 --- /dev/null +++ b/notebooks/profile_default/startup/README @@ -0,0 +1,11 @@ +This is the IPython startup directory + +.py and .ipy files in this directory will be run *prior* to any code or files specified +via the exec_lines or exec_files configurables whenever you load this profile. + +Files will be run in lexicographical order, so you can control the execution order of files +with a prefix, e.g.:: + + 00-first.py + 50-middle.py + 99-last.ipy diff --git a/notebooks/profile_default/startup/ipython_startup.py b/notebooks/profile_default/startup/ipython_startup.py new file mode 100644 index 0000000..e69de29 diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..73a151b --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1644 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. 
+ +[[package]] +name = "altgraph" +version = "0.17.4" +description = "Python graph (network) package" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"}, + {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"}, +] + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "astroid" +version = "2.13.5" +description = "An abstract syntax tree for Python with inference support." +category = "dev" +optional = false +python-versions = ">=3.7.2" +files = [ + {file = "astroid-2.13.5-py3-none-any.whl", hash = "sha256:6891f444625b6edb2ac798829b689e95297e100ddf89dbed5a8c610e34901501"}, + {file = "astroid-2.13.5.tar.gz", hash = "sha256:df164d5ac811b9f44105a72b8f9d5edfb7b5b2d7e979b04ea377a77b3229114a"}, +] + +[package.dependencies] +lazy-object-proxy = ">=1.4.0" +wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""} + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = 
"coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = 
"coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "coveragespace" +version = "6.1" +description = "A place to track your code coverage metrics." 
+category = "dev" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "coveragespace-6.1-py3-none-any.whl", hash = "sha256:ca6ccd5eb32eb6ce5fe78de6c052353b9fbb378a886fde0838480defe33406a8"}, + {file = "coveragespace-6.1.tar.gz", hash = "sha256:049c0b7b629ad43d72692f0f99b9f8a97936ad596f7f27c1af61323fba90ebef"}, +] + +[package.dependencies] +colorama = ">=0.4" +coverage = ">=4.0" +docopt = ">=0.6" +minilog = ">=2.0" +requests = ">=2.28,<3.0" + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] + +[[package]] +name = "freezegun" +version = "1.5.1" +description = "Let your Python tests travel through time" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, + {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipython" +version = "7.34.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ipython-7.34.0-py3-none-any.whl", hash = "sha256:c175d2440a1caff76116eb719d40538fbb316e214eda85c5515c303aacbfb23e"}, + {file = "ipython-7.34.0.tar.gz", hash = "sha256:af3bdb46aa292bce5615b1b2ebc76c2080c5f77f54bda2ec72461317273e7cd6"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" +setuptools = ">=18.5" +traitlets = ">=4.2" + +[package.extras] +all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = 
["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["ipykernel", "nbformat", "nose (>=0.10.1)", "numpy (>=1.17)", "pygments", "requests", "testpath"] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "lazy-object-proxy" +version = "1.10.0" +description = "A fast and thorough lazy object proxy." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file 
= "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, +] + +[[package]] +name = "macfsevents" +version = "0.8.4" +description = "Thread-based interface to file system observation primitives." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "MacFSEvents-0.8.4.tar.gz", hash = "sha256:bf7283f1d517764ccdc8195b21631dbbac1c506b920bf9a8ea2956b3127651cb"}, +] + +[[package]] +name = "macholib" +version = "1.16.3" +description = "Mach-O header analysis and editing" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"}, + {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"}, +] + +[package.dependencies] +altgraph = ">=0.17" + +[[package]] +name = "markdown" +version = "3.3.7" +description = "Python implementation of Markdown." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, + {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, +] + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = 
false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "minilog" +version = "2.3.1" +description = "Minimalistic wrapper for Python logging." +category = "dev" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "minilog-2.3.1-py3-none-any.whl", hash = "sha256:1a679fefe6140ce1d59c3246adc991f9eb480169e5a6c54d2be9023ee459dc30"}, + {file = "minilog-2.3.1.tar.gz", hash = "sha256:4b602572c3bcdd2d8f00d879f635c0de9e632d5d0307e131c91074be8acf444e"}, +] + +[[package]] +name = "mkdocs" +version = "1.3.1" +description = "Project documentation with Markdown." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, + {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, +] + +[package.dependencies] +click = ">=3.3" +ghp-import = ">=1.0" +importlib-metadata = ">=4.3" +Jinja2 = ">=2.10.2" +Markdown = ">=3.2.1,<3.4" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +PyYAML = ">=3.10" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] + +[[package]] +name = "mypy" +version = "1.11.2" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nose" +version = "1.3.7" +description = "nose extends unittest to make testing easier" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "nose-1.3.7-py2-none-any.whl", hash = "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a"}, + {file = "nose-1.3.7-py3-none-any.whl", hash = "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac"}, + {file = "nose-1.3.7.tar.gz", hash = "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pefile" +version = "2024.8.26" +description = "Python PE parsing module" +category = "dev" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "pefile-2024.8.26-py3-none-any.whl", hash = "sha256:76f8b485dcd3b1bb8166f1128d395fa3d87af26360c2358fb75b80019b957c6f"}, + {file = "pefile-2024.8.26.tar.gz", hash = "sha256:3ff6c5d8b43e8c37bb6e6dd5085658d658a7a0bdcd20b6a07b1fcfc1c4e9d632"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] 
+snowballstemmer = ">=2.2.0" + +[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyinstaller" +version = "4.5.1" +description = "PyInstaller bundles a Python application and all its dependencies into a single package." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyinstaller-4.5.1-py3-none-macosx_10_13_universal2.whl", hash = "sha256:ecc2baadeeefd2b6fbf39d13c65d4aa603afdda1c6aaaebc4577ba72893fee9e"}, + {file = "pyinstaller-4.5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4d848cd782ee0893d7ad9fe2bfe535206a79f0b6760cecc5f2add831258b9322"}, + {file = "pyinstaller-4.5.1-py3-none-manylinux2014_i686.whl", hash = "sha256:8f747b190e6ad30e2d2fd5da9a64636f61aac8c038c0b7f685efa92c782ea14f"}, + {file = "pyinstaller-4.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c587da8f521a7ce1b9efb4e3d0117cd63c92dc6cedff24590aeef89372f53012"}, + {file = "pyinstaller-4.5.1-py3-none-win32.whl", hash = "sha256:fed9f5e4802769a416a8f2ca171c6be961d1861cc05a0b71d20dfe05423137e9"}, + {file = "pyinstaller-4.5.1-py3-none-win_amd64.whl", hash = "sha256:aae456205c68355f9597411090576bb31b614e53976b4c102d072bbe5db8392a"}, + {file = "pyinstaller-4.5.1.tar.gz", hash = "sha256:30733baaf8971902286a0ddf77e5499ac5f7bf8e7c39163e83d4f8c696ef265e"}, +] + +[package.dependencies] +altgraph = "*" +macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} +pefile = {version = ">=2017.8.1", markers = "sys_platform == \"win32\""} +pyinstaller-hooks-contrib = ">=2020.6" +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +setuptools = "*" + +[package.extras] +encryption = ["tinyaes (>=1.0.0)"] +hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] + +[[package]] +name = "pyinstaller-hooks-contrib" +version = "2024.8" +description = "Community maintained hooks for PyInstaller" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyinstaller_hooks_contrib-2024.8-py3-none-any.whl", hash = "sha256:0057fe9a5c398d3f580e73e58793a1d4a8315ca91c3df01efea1c14ed557825a"}, + {file = "pyinstaller_hooks_contrib-2024.8.tar.gz", hash = "sha256:29b68d878ab739e967055b56a93eb9b58e529d5b054fbab7a2f2bacf80cef3e2"}, +] + +[package.dependencies] +packaging = ">=22.0" +setuptools = ">=42.0.0" + +[[package]] +name = "pylint" +version = "2.15.10" +description = "python code static checker" +category = "dev" +optional = false +python-versions = ">=3.7.2" +files = [ + {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"}, + {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"}, +] + +[package.dependencies] +astroid = ">=2.12.13,<=2.14.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = {version = ">=0.3.6", markers = "python_version >= \"3.11\""} +isort = ">=4.2.5,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" 
+ +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pylint-pytest" +version = "1.1.7" +description = "A Pylint plugin to suppress pytest-related false positives." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pylint-pytest-1.1.7.tar.gz", hash = "sha256:7a38be02c014eb6d98791eb978e79ed292f1904d3a518289c6d7ac4fb4122e98"}, + {file = "pylint_pytest-1.1.7-py3-none-any.whl", hash = "sha256:5d687a2f4b17e85654fc2a8f04944761efb11cb15dc46d008f420c377b149151"}, +] + +[package.dependencies] +pylint = ">=2" +pytest = ">=4.6" + +[[package]] +name = "pync" +version = "2.0.3" +description = "Python Wrapper for Mac OS 10.10 Notification Center" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pync-2.0.3.tar.gz", hash = "sha256:38b9e61735a3161f9211a5773c5f5ea698f36af4ff7f77fa03e8d1ff0caa117f"}, +] + +[package.dependencies] +python-dateutil = ">=2.0" + +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-describe" +version = "2.2.0" +description = "Describe-style plugin for pytest" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7"}, + {file = "pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d"}, +] + +[package.dependencies] +pytest = ">=4.6,<9" + +[[package]] +name = "pytest-expecter" +version = "3.0" +description = "Better testing with expecter and pytest." 
+category = "dev" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest-expecter-3.0.tar.gz", hash = "sha256:be8f3e9f823af6d6713e3f552ed47560061a2fd243a78952180f5df61a2b76a4"}, + {file = "pytest_expecter-3.0-py3-none-any.whl", hash = "sha256:98fe65ecc1ddb7ca29084dc68ec07983dbbdb20b566fd14140b0b5f4b7c84cc8"}, +] + +[[package]] +name = "pytest-random" +version = "0.02" +description = "py.test plugin to randomize tests" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pytest-random-0.02.tar.gz", hash = "sha256:92f25db8c5d9ffc20d90b51997b914372d6955cb9cf1f6ead45b90514fc0eddd"}, +] + +[package.dependencies] +pytest = ">=2.2.3" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-termstyle" +version = "0.1.10" +description = "console colouring for python" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "python-termstyle-0.1.10.tar.gz", hash = "sha256:f42a6bb16fbfc5e2c66d553e7ad46524ea833872f75ee5d827c15115fafc94e2"}, + {file = "python-termstyle-0.1.10.tgz", hash = "sha256:6faf42ba42f2826c38cf70dacb3ac51f248a418e48afc0e36593df11cf3ab1d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, + {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = 
"sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. 
" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "75.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.11.0,<1.12.0)", "pytest-mypy"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffer" +version = "0.4.1" +description = "An automatic test runner. Supports nose out of the box." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "sniffer-0.4.1-py2.py3-none-any.whl", hash = "sha256:f120843fe152d0e380402fc11313b151e2044c47fdd36895de2efedc8624dbb8"}, + {file = "sniffer-0.4.1.tar.gz", hash = "sha256:b37665053fb83d7790bf9e51d616c11970863d14b5ea5a51155a4e95759d1529"}, +] + +[package.dependencies] +colorama = "*" +nose = "*" +python-termstyle = "*" + +[package.extras] +growl = ["gntp (==0.7)"] +libnotify = ["py-notify (==0.3.1)"] +linux = ["pyinotify (==0.9.0)"] +osx = ["MacFSEvents (==0.2.8)"] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "watchdog" +version = "5.0.2" +description = "Filesystem events monitoring" +category = "dev" +optional = false +python-versions = ">=3.9" +files = [ + {file = "watchdog-5.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d961f4123bb3c447d9fcdcb67e1530c366f10ab3a0c7d1c0c9943050936d4877"}, + {file = "watchdog-5.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72990192cb63872c47d5e5fefe230a401b87fd59d257ee577d61c9e5564c62e5"}, + {file = "watchdog-5.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6bec703ad90b35a848e05e1b40bf0050da7ca28ead7ac4be724ae5ac2653a1a0"}, + {file = "watchdog-5.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dae7a1879918f6544201d33666909b040a46421054a50e0f773e0d870ed7438d"}, + {file = "watchdog-5.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c4a440f725f3b99133de610bfec93d570b13826f89616377715b9cd60424db6e"}, + {file = "watchdog-5.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8b2918c19e0d48f5f20df458c84692e2a054f02d9df25e6c3c930063eca64c1"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aa9cd6e24126d4afb3752a3e70fce39f92d0e1a58a236ddf6ee823ff7dba28ee"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f627c5bf5759fdd90195b0c0431f99cff4867d212a67b384442c51136a098ed7"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d7594a6d32cda2b49df3fd9abf9b37c8d2f3eab5df45c24056b4a671ac661619"}, + {file = "watchdog-5.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba32efcccfe2c58f4d01115440d1672b4eb26cdd6fc5b5818f1fb41f7c3e1889"}, + {file = "watchdog-5.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:963f7c4c91e3f51c998eeff1b3fb24a52a8a34da4f956e470f4b068bb47b78ee"}, + {file = "watchdog-5.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8c47150aa12f775e22efff1eee9f0f6beee542a7aa1a985c271b1997d340184f"}, + {file = "watchdog-5.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14dd4ed023d79d1f670aa659f449bcd2733c33a35c8ffd88689d9d243885198b"}, + {file = "watchdog-5.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b84bff0391ad4abe25c2740c7aec0e3de316fdf7764007f41e248422a7760a7f"}, + {file = "watchdog-5.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e8d5ff39f0a9968952cce548e8e08f849141a4fcc1290b1c17c032ba697b9d7"}, + {file = "watchdog-5.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fb223456db6e5f7bd9bbd5cd969f05aae82ae21acc00643b60d81c770abd402b"}, + {file = "watchdog-5.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9814adb768c23727a27792c77812cf4e2fd9853cd280eafa2bcfa62a99e8bd6e"}, + {file = "watchdog-5.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:901ee48c23f70193d1a7bc2d9ee297df66081dd5f46f0ca011be4f70dec80dab"}, + {file = "watchdog-5.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:638bcca3d5b1885c6ec47be67bf712b00a9ab3d4b22ec0881f4889ad870bc7e8"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:5597c051587f8757798216f2485e85eac583c3b343e9aa09127a3a6f82c65ee8"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:53ed1bf71fcb8475dd0ef4912ab139c294c87b903724b6f4a8bd98e026862e6d"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:29e4a2607bd407d9552c502d38b45a05ec26a8e40cc7e94db9bb48f861fa5abc"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:b6dc8f1d770a8280997e4beae7b9a75a33b268c59e033e72c8a10990097e5fde"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:d2ab34adc9bf1489452965cdb16a924e97d4452fcf88a50b21859068b50b5c3b"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:7d1aa7e4bb0f0c65a1a91ba37c10e19dabf7eaaa282c5787e51371f090748f4b"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:726eef8f8c634ac6584f86c9c53353a010d9f311f6c15a034f3800a7a891d941"}, + {file = "watchdog-5.0.2-py3-none-win32.whl", hash = "sha256:bda40c57115684d0216556671875e008279dea2dc00fcd3dde126ac8e0d7a2fb"}, + {file = "watchdog-5.0.2-py3-none-win_amd64.whl", hash = "sha256:d010be060c996db725fbce7e3ef14687cdcc76f4ca0e4339a68cc4532c382a73"}, + {file = "watchdog-5.0.2-py3-none-win_ia64.whl", hash = "sha256:3960136b2b619510569b90f0cd96408591d6c251a75c97690f4553ca88889769"}, + {file = "watchdog-5.0.2.tar.gz", hash = "sha256:dcebf7e475001d2cdeb020be630dc5b687e9acdd60d16fea6bb4508e7b94cf76"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "69265f0ba4da009a80795e3f42a7d9807b679ef020222bfb92562a261686477d" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..5eaa8bd --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,108 @@ +[tool.poetry] + +name = "filecloudapi-python" +version = "0.1" +description = "A Python library to connect to a Filecloud server" + +packages = [{ include = "filecloudapi" }] + +license = "MIT" +authors = ["FileCloud "] + +readme = "README.md" +homepage = "https://pypi.org/project/filecloudapi-python" +documentation = "https://filecloudapi-python.readthedocs.io" +repository = "https://github.com/codelathe/filecloudapi-python" + +keywords = [ +] +classifiers = [ + "Development Status :: 4 - Beta", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: Apache Software License" +] + +[tool.poetry.dependencies] + +python = "^3.11" + +click = "*" +requests = "*" + +[tool.poetry.dev-dependencies] + +# Formatters +black = "^22.1" +tomli = "*" # missing 'black' dependency +isort = "^5.10" + +# Linters +mypy = "^1.0" +pydocstyle = "^6.1" +pylint = "~2.15" +pylint-pytest = "*" +wrapt = "*" # missing 'pylint' dependency + +# Testing +pytest = "^8.1" +pytest-describe = "^2.0" +pytest-expecter = "^3.0" +pytest-random = "*" +pytest-cov = "^4.1" +freezegun = "*" + +# Reports +coveragespace = "^6.1" + +# Documentation +mkdocs = "~1.3" +pygments = "^2.11.1" + +# Tooling +pyinstaller = "*" +sniffer = "*" +MacFSEvents = { version = "*", platform = "darwin" } +pync = { version = "*", platform = "darwin" } +ipython = "^7.12.0" + +[tool.black] + +quiet = true + +[tool.isort] + +profile = "black" + +[tool.mypy] + +ignore_missing_imports = true +no_implicit_optional = true +check_untyped_defs = true + +cache_dir = ".cache/mypy/" + +[tool.pytest.ini_options] + +addopts = """ +--strict-markers + +-r sxX +--show-capture=log + +--cov-report=html +--cov-report=term-missing:skip-covered +--no-cov-on-fail +""" + +cache_dir = ".cache/pytest/" + +markers = [] + +[build-system] + +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git 
a/scent.py b/scent.py new file mode 100644 index 0000000..2645da4 --- /dev/null +++ b/scent.py @@ -0,0 +1,96 @@ +"""Configuration file for sniffer.""" + +import time +import subprocess + +from sniffer.api import select_runnable, file_validator, runnable +try: + from pync import Notifier +except ImportError: + notify = None +else: + notify = Notifier.notify + + +watch_paths = ["filecloudapi", "tests"] + + +class Options: + group = int(time.time()) # unique per run + show_coverage = False + rerun_args = None + + targets = [ + (("make", "test-unit", "DISABLE_COVERAGE=true"), "Unit Tests", True), + (("make", "test-all"), "Integration Tests", False), + (("make", "check"), "Static Analysis", True), + (("make", "docs", "CI=true"), None, True), + ] + + +@select_runnable("run_targets") +@file_validator +def python_files(filename): + return filename.endswith(".py") and ".py." not in filename + + +@select_runnable("run_targets") +@file_validator +def html_files(filename): + return filename.split(".")[-1] in ["html", "css", "js"] + + +@runnable +def run_targets(*args): + """Run targets for Python.""" + Options.show_coverage = "coverage" in args + + count = 0 + for count, (command, title, retry) in enumerate(Options.targets, start=1): + + success = call(command, title, retry) + if not success: + message = "✅ " * (count - 1) + "❌" + show_notification(message, title) + + return False + + message = "✅ " * count + title = "All Targets" + show_notification(message, title) + show_coverage() + + return True + + +def call(command, title, retry): + """Run a command-line program and display the result.""" + if Options.rerun_args: + command, title, retry = Options.rerun_args + Options.rerun_args = None + success = call(command, title, retry) + if not success: + return False + + print("") + print("$ %s" % " ".join(command)) + failure = subprocess.call(command) + + if failure and retry: + Options.rerun_args = command, title, retry + + return not failure + + +def show_notification(message, title): + """Show a user notification.""" + if notify and title: + notify(message, title=title, group=Options.group) + + +def show_coverage(): + """Launch the coverage report.""" + if Options.show_coverage: + subprocess.call(["make", "read-coverage"]) + + Options.show_coverage = False diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..7669521 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +"""Integration tests for the package.""" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..84561c6 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,6 @@ +# Copyright (c) 2024 FileCloud. All Rights Reserved. +"""Integration tests configuration file.""" + +# pylint: disable=unused-import + +from filecloudapi.tests.conftest import pytest_configure
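
Note on the last file above: the repository-level tests/conftest.py only re-exports pytest_configure from filecloudapi.tests.conftest, a module that is not part of this changeset. A minimal sketch of what such a shared hook could look like, assuming it does nothing more than configure logging for both test suites (the body below is illustrative, not the package's actual implementation):

# Hypothetical sketch of filecloudapi/tests/conftest.py; the real module is
# not included in this diff -- only the re-export in tests/conftest.py is.
import logging


def pytest_configure(config):
    """Shared pytest hook re-exported by the repository-level conftest."""
    # Keep test output readable: relative timestamp, level, and message.
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(relativeCreated)d %(levelname)s: %(message)s",
    )
    # Silence noisy DEBUG output from the HTTP stack during test runs.
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("requests").setLevel(logging.WARNING)

Re-exporting the hook this way keeps a single source of truth for test configuration, so the integration tests under tests/ pick up the same setup as the package's own unit tests.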