From 2e07adc56a340486212e945052b813894fe3c243 Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 11:08:21 -0500 Subject: [PATCH 1/9] Add ruff configuration. --- .pre-commit-config.yaml | 18 ++++----------- .ruff.toml | 50 +++++++++++++++++++++++++++++++++++++++++ setup.cfg | 42 ---------------------------------- 3 files changed, 54 insertions(+), 56 deletions(-) create mode 100644 .ruff.toml delete mode 100644 setup.cfg diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 38e260ba..7d4e39d5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -42,21 +42,11 @@ repos: - --add=Part of GSD, released under the BSD 2-Clause License. - --keep-after=.. include - --comment-prefix=.. -- repo: https://github.com/google/yapf - rev: 'v0.40.2' +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: 'v0.1.13' hooks: - - id: yapf - additional_dependencies: - - toml==0.10.2 -- repo: https://github.com/PyCQA/flake8 - rev: '6.1.0' - hooks: - - id: flake8 - additional_dependencies: - - pep8-naming==0.13.3 - - pydocstyle==6.2.3 - - flake8-docstrings==1.6.0 - - flake8-rst-docstrings==0.3.0 + - id: ruff-format + - id: ruff - repo: https://github.com/pre-commit/mirrors-clang-format rev: v17.0.6 hooks: diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 00000000..b99500d1 --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,50 @@ +target-version = "py38" + +extend-select = [ + "A", + "B", + "D", + "E501", + "EM", + "I", + "ICN", + "ISC", + "N", + "NPY", + "PL", + "PT", + "RET", + "RUF", + "UP", + "W", +] + +ignore = [ + "N806", "N803", # Allow occasional use of uppercase variable and argument names (e.g. N). + "D107", # Do not document __init__ separately from the class. + "PLR09", # Allow "too many" statements/arguments/etc... + "N816", # Allow mixed case names like kT. +] + +[lint.per-file-ignores] + +"__init__.py" = ["F401", # __init__.py import submodules for use by the package importer. +] + +[pydocstyle] +convention = "google" + +[format] +quote-style = "single" + +[lint.flake8-import-conventions] +# Prefer no import aliases +aliases = {} +# Always import hoomd and gsd without 'from' +banned-from = ["hoomd", "gsd"] + +# Ban standard import conventions and force common packages to be imported by their actual name. +[lint.flake8-import-conventions.banned-aliases] +"numpy" = ["np"] +"pandas" = ["pd"] +"matplotlib" = ["mpl"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 507bb3f9..00000000 --- a/setup.cfg +++ /dev/null @@ -1,42 +0,0 @@ -[flake8] -select = E,F,W,N,D,RST -ignore = W503,N806,N803,E126,E133,D107 -per-file-ignores = - setup.py:D -exclude = .git, - __pycache__, - doc/conf.py, - run-clang-format.py, - build, - dist -max_line_length = 80 -max_doc_length = 80 -hang_closing = True -docstring-convention=google -rst-directives = - program, - option, - deprecated, - ipython, -rst-roles = - file, - py:mod, - py:class, - py:meth, - py:func, - mod, - class, - meth, - func, - chunk, - ref, - py:exc, - -[yapf] -based_on_style = google -align_closing_bracket_with_visual_indent = True -split_before_arithmetic_operator = True -split_before_bitwise_operator = True -split_before_logical_operator = True -blank_line_before_module_docstring = True -split_before_dot = True From 8c79e688757135553de30c6c7b6ae16f2dbe6a3c Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 11:18:14 -0500 Subject: [PATCH 2/9] Apply safe fixes. 
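These are the changes ruff can apply automatically, for example via `ruff check --fix .` or the ruff pre-commit hook added in the previous patch: sorting imports into standard-library / third-party / first-party groups, converting `.format()` calls to f-strings, dropping `object` base classes, and replacing `IOError` with `OSError`. A minimal sketch of the shape of such a fix; the `Reader` class and `count` attribute are made-up names for illustration, not code from this repository:

    # Before: flagged by I001 (unsorted imports), UP004 (useless object
    # inheritance), and UP032 (use an f-string instead of .format()).
    import sys
    import numpy
    import json


    class Reader(object):
        def describe(self):
            return "read {} frames".format(self.count)


    # After `ruff check --fix`: imports grouped and sorted, modern idioms applied.
    import json
    import sys

    import numpy


    class Reader:
        def describe(self):
            return f"read {self.count} frames"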
--- doc/conf.py | 7 ++++--- gsd/__init__.py | 3 ++- gsd/__main__.py | 11 +++++------ gsd/hoomd.py | 22 +++++++++++----------- gsd/pygsd.py | 16 +++++++--------- gsd/test/conftest.py | 3 ++- gsd/test/test_fl.py | 14 ++++++++------ gsd/test/test_hoomd.py | 20 +++++++++++--------- gsd/test/test_largefile.py | 8 +++++--- scripts/benchmark-hoomd.py | 16 +++++++++------- setup.py | 4 ++-- 11 files changed, 66 insertions(+), 58 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index e4615250..2e22a864 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -2,12 +2,13 @@ # Copyright (c) 2016-2023 The Regents of the University of Michigan # Part of GSD, released under the BSD 2-Clause License. +import datetime import os import subprocess -import gsd -import datetime import tempfile +import gsd + extensions = [ 'breathe', 'sphinx.ext.autodoc', @@ -24,7 +25,7 @@ 'numpy': ('https://numpy.org/doc/stable', None), 'hoomd': ('https://hoomd-blue.readthedocs.io/en/latest/', None), } -autodoc_docstring_signature = True; +autodoc_docstring_signature = True templates_path = ['_templates'] diff --git a/gsd/__init__.py b/gsd/__init__.py index 877ac7f1..04fe0ee2 100644 --- a/gsd/__init__.py +++ b/gsd/__init__.py @@ -14,7 +14,8 @@ import signal import sys -from . import version # noqa: F401 + +from . import version # Install a SIGTERM handler that gracefully exits, allowing open files to flush # buffered writes and close. Catch ValueError and pass as there is no way to diff --git a/gsd/__main__.py b/gsd/__main__.py index 7d85c3da..1e3e1f96 100644 --- a/gsd/__main__.py +++ b/gsd/__main__.py @@ -24,13 +24,12 @@ accepted by :func:`gsd.fl.open`. """ -import sys import argparse import code +import sys -from . import version +from . import fl, version from .hoomd import open as hoomd_open -from . import fl def _print_err(msg=None, *args): @@ -76,7 +75,7 @@ def main_read(args): }) extras = "\n".join( - "{}: {}".format(key, val) for key, val in attributes.items()) + f"{key}: {val}" for key, val in attributes.items()) code.interact(local=local_ns, banner=SHELL_BANNER.format(python_version=sys.version, @@ -158,12 +157,12 @@ def main(): raise sys.exit(1) except RuntimeWarning as warning: - _print_err("Warning: {}".format(warning)) + _print_err(f"Warning: {warning}") if args.debug: raise sys.exit(1) except Exception as error: - _print_err('Error: {}'.format(error)) + _print_err(f'Error: {error}') if args.debug: raise sys.exit(1) diff --git a/gsd/hoomd.py b/gsd/hoomd.py index 3fd1a08a..f2a17654 100644 --- a/gsd/hoomd.py +++ b/gsd/hoomd.py @@ -20,11 +20,12 @@ See :ref:`hoomd-examples` for full examples. """ -import numpy -from collections import OrderedDict -import logging import json +import logging import warnings +from collections import OrderedDict + +import numpy try: from gsd import fl @@ -39,7 +40,7 @@ logger = logging.getLogger('gsd.hoomd') -class ConfigurationData(object): +class ConfigurationData: """Store configuration data. Use the `Frame.configuration` attribute of a to access the configuration. @@ -105,7 +106,7 @@ def validate(self): self.box = self.box.reshape([6]) -class ParticleData(object): +class ParticleData: """Store particle data chunks. Use the `Frame.particles` attribute of a to access the particles. @@ -256,7 +257,7 @@ def validate(self): raise ValueError("Type names must be unique.") -class BondData(object): +class BondData: """Store bond data chunks. Use the `Frame.bonds`, `Frame.angles`, `Frame.dihedrals`, @@ -272,7 +273,6 @@ class BondData(object): data. Note: - *M* varies depending on the type of bond. 
`BondData` represents all types of topology connections. @@ -348,7 +348,7 @@ def validate(self): raise ValueError("Type names must be unique.") -class ConstraintData(object): +class ConstraintData: """Store constraint data. Use the `Frame.constraints` attribute to access the constraints. @@ -632,7 +632,7 @@ def validate(self): raise RuntimeError('Not a valid state: ' + k) -class _HOOMDTrajectoryIterable(object): +class _HOOMDTrajectoryIterable: """Iterable over a HOOMDTrajectory object.""" def __init__(self, trajectory, indices): @@ -652,7 +652,7 @@ def __len__(self): return len(self._indices) -class _HOOMDTrajectoryView(object): +class _HOOMDTrajectoryView: """A view of a HOOMDTrajectory object. Enables the slicing and iteration over a subset of a trajectory @@ -676,7 +676,7 @@ def __getitem__(self, key): return self._trajectory[self._indices[key]] -class HOOMDTrajectory(object): +class HOOMDTrajectory: """Read and write hoomd gsd files. Args: diff --git a/gsd/pygsd.py b/gsd/pygsd.py index c21f4c20..2489bec4 100644 --- a/gsd/pygsd.py +++ b/gsd/pygsd.py @@ -29,13 +29,12 @@ """ -from __future__ import print_function -from __future__ import division import logging -import numpy import struct -from collections import namedtuple import sys +from collections import namedtuple + +import numpy version = "3.2.0" @@ -68,7 +67,7 @@ } -class GSDFile(object): +class GSDFile: """GSD file access interface. Implemented in pure Python and accepts any Python file-like object. @@ -117,7 +116,7 @@ def __init__(self, file): raise if len(header_raw) != gsd_header_struct.size: - raise IOError + raise OSError self.__header = gsd_header._make(gsd_header_struct.unpack(header_raw)) @@ -157,7 +156,7 @@ def __init__(self, file): for i in range(self.__header.index_allocated_entries): index_entry_raw = self.__file.read(gsd_index_entry_struct.size) if len(index_entry_raw) != gsd_index_entry_struct.size: - raise IOError + raise OSError idx = gsd_index_entry._make( gsd_index_entry_struct.unpack(index_entry_raw)) @@ -266,7 +265,6 @@ def chunk_exists(self, frame, name): bool: True if the chunk exists in the file. False if it does not. 
Example: - Handle non-existent chunks:: with GSDFile(open('file.gsd', mode='r')) as f: @@ -342,7 +340,7 @@ def read_chunk(self, frame, name): data_raw = self.__file.read(size) if len(data_raw) != size: - raise IOError + raise OSError data_npy = numpy.frombuffer(data_raw, dtype=gsd_type_mapping[chunk.type]) diff --git a/gsd/test/conftest.py b/gsd/test/conftest.py index b791ed59..b6f71586 100644 --- a/gsd/test/conftest.py +++ b/gsd/test/conftest.py @@ -3,9 +3,10 @@ """Pytest fixtures common to all tests.""" -import pytest import collections +import pytest + Mode = collections.namedtuple('Mode', 'read write') mode_list = [Mode('r', 'w'), Mode('a', 'x'), Mode('r', 'a')] diff --git a/gsd/test/test_fl.py b/gsd/test/test_fl.py index 2493cfcb..d538658f 100644 --- a/gsd/test/test_fl.py +++ b/gsd/test/test_fl.py @@ -3,17 +3,19 @@ """Test gsd.fl.""" -import gsd.fl -import gsd.pygsd -import numpy +import os +import pathlib import platform -import pytest import random -import pathlib -import os import shutil import sys +import numpy +import pytest + +import gsd.fl +import gsd.pygsd + test_path = pathlib.Path(os.path.realpath(__file__)).parent diff --git a/gsd/test/test_hoomd.py b/gsd/test/test_hoomd.py index 4baf4140..21488e99 100644 --- a/gsd/test/test_hoomd.py +++ b/gsd/test/test_hoomd.py @@ -3,12 +3,14 @@ """Test the gsd.hoomd API.""" -import gsd.fl -import gsd.hoomd -import numpy import pickle + +import numpy import pytest +import gsd.fl +import gsd.hoomd + def test_create(tmp_path): """Test that gsd files can be created.""" @@ -64,7 +66,7 @@ def test_extend(tmp_path, open_mode): with gsd.hoomd.open(name=tmp_path / "test_extend.gsd", mode=open_mode.write) as hf: - hf.extend((create_frame(i) for i in range(5))) + hf.extend(create_frame(i) for i in range(5)) with gsd.hoomd.open(name=tmp_path / "test_extend.gsd", mode=open_mode.read) as hf: @@ -555,7 +557,7 @@ def test_iteration(tmp_path, open_mode): """Test the iteration protocols for hoomd trajectories.""" with gsd.hoomd.open(name=tmp_path / "test_iteration.gsd", mode=open_mode.write) as hf: - hf.extend((create_frame(i) for i in range(20))) + hf.extend(create_frame(i) for i in range(20)) with gsd.hoomd.open(name=tmp_path / "test_iteration.gsd", mode=open_mode.read) as hf: @@ -597,7 +599,7 @@ def test_slicing_and_iteration(tmp_path, open_mode): """Test that hoomd trajectories can be sliced.""" with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", mode=open_mode.write) as hf: - hf.extend((create_frame(i) for i in range(20))) + hf.extend(create_frame(i) for i in range(20)) with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", mode=open_mode.read) as hf: @@ -640,7 +642,7 @@ def test_view_slicing_and_iteration(tmp_path, open_mode): """Test that trajectories can be sliced.""" with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", mode=open_mode.write) as hf: - hf.extend((create_frame(i) for i in range(40))) + hf.extend(create_frame(i) for i in range(40)) with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", mode=open_mode.read) as hf: @@ -688,7 +690,7 @@ def test_view_slicing_and_iteration(tmp_path, open_mode): def test_truncate(tmp_path): """Test the truncate API.""" with gsd.hoomd.open(name=tmp_path / "test_iteration.gsd", mode='w') as hf: - hf.extend((create_frame(i) for i in range(20))) + hf.extend(create_frame(i) for i in range(20)) assert len(hf) == 20 s = hf[10] # noqa @@ -786,7 +788,7 @@ def test_log(tmp_path, open_mode): def test_pickle(tmp_path): """Test that hoomd trajectory objects can be pickled.""" with gsd.hoomd.open(name=tmp_path / 
"test_pickling.gsd", mode='w') as traj: - traj.extend((create_frame(i) for i in range(20))) + traj.extend(create_frame(i) for i in range(20)) with pytest.raises(pickle.PickleError): pkl = pickle.dumps(traj) with gsd.hoomd.open(name=tmp_path / "test_pickling.gsd", mode='r') as traj: diff --git a/gsd/test/test_largefile.py b/gsd/test/test_largefile.py index 50cb1d92..eb65d829 100644 --- a/gsd/test/test_largefile.py +++ b/gsd/test/test_largefile.py @@ -3,13 +3,15 @@ """Test the gsd.fl API with large files.""" -import gsd.hoomd +import gc + import numpy import pytest -import gc + +import gsd.hoomd -@pytest.mark.validate +@pytest.mark.validate() @pytest.mark.parametrize("N", [2**27, 2**28, 2**29 + 1]) def test_large_n(tmp_path, N): """Test data chunks and files larger than 2 GB.""" diff --git a/scripts/benchmark-hoomd.py b/scripts/benchmark-hoomd.py index 7c662bf8..d569210c 100755 --- a/scripts/benchmark-hoomd.py +++ b/scripts/benchmark-hoomd.py @@ -3,16 +3,18 @@ """Benchmark GSD HOOMD file read/write.""" -import time -import gsd.fl -import gsd.pygsd -import gsd.hoomd -import os import math +import os import random -import numpy import sys -from subprocess import call, PIPE +import time +from subprocess import PIPE, call + +import numpy + +import gsd.fl +import gsd.hoomd +import gsd.pygsd # import logging # logging.basicConfig(level=logging.DEBUG) diff --git a/setup.py b/setup.py index 41749101..417788c9 100644 --- a/setup.py +++ b/setup.py @@ -1,10 +1,10 @@ # Copyright (c) 2016-2023 The Regents of the University of Michigan # Part of GSD, released under the BSD 2-Clause License. -from setuptools import setup -from setuptools.extension import Extension import numpy from Cython.Build import cythonize +from setuptools import setup +from setuptools.extension import Extension extensions = cythonize( [Extension( From 841f877ac8bdf2a5a75370ea6899e4055bd5e76b Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 11:23:08 -0500 Subject: [PATCH 3/9] Apply unsafe fixes. --- gsd/__main__.py | 3 ++- gsd/hoomd.py | 23 +++++++++++++++-------- gsd/pygsd.py | 9 ++++++--- gsd/test/test_fl.py | 8 ++++---- scripts/benchmark-hoomd.py | 8 ++++---- 5 files changed, 31 insertions(+), 20 deletions(-) diff --git a/gsd/__main__.py b/gsd/__main__.py index 1e3e1f96..e78d01e7 100644 --- a/gsd/__main__.py +++ b/gsd/__main__.py @@ -68,7 +68,8 @@ def main_read(args): attributes.update({"Number of frames": len(traj)}) else: if args.mode not in ['rb', 'rb+', 'ab', 'a', 'r', 'r+']: - raise ValueError("Unsupported schema for creating a file.") + msg = "Unsupported schema for creating a file." + raise ValueError(msg) handle = fl.open(args.file, args.mode) local_ns.update({ 'handle': handle, diff --git a/gsd/hoomd.py b/gsd/hoomd.py index f2a17654..00ca60d7 100644 --- a/gsd/hoomd.py +++ b/gsd/hoomd.py @@ -254,7 +254,8 @@ def validate(self): if (self.types is not None and (not len(set(self.types)) == len(self.types))): - raise ValueError("Type names must be unique.") + msg = "Type names must be unique." + raise ValueError(msg) class BondData: @@ -345,7 +346,8 @@ def validate(self): if (self.types is not None and (not len(set(self.types)) == len(self.types))): - raise ValueError("Type names must be unique.") + msg = "Type names must be unique." 
+ raise ValueError(msg) class ConstraintData: @@ -687,7 +689,8 @@ class HOOMDTrajectory: def __init__(self, file): if file.mode == 'ab': - raise ValueError('Append mode not yet supported') + msg = 'Append mode not yet supported' + raise ValueError(msg) self._file = file self._initial_frame = None @@ -1079,9 +1082,11 @@ def open(name, mode='r'): """ if fl is None: - raise RuntimeError("file layer module is not available") + msg = "file layer module is not available" + raise RuntimeError(msg) if gsd is None: - raise RuntimeError("gsd module is not available") + msg = "gsd module is not available" + raise RuntimeError(msg) gsdfileobj = fl.open(name=str(name), mode=mode, @@ -1126,9 +1131,11 @@ def read_log(name, scalar_only=False): df """ if fl is None: - raise RuntimeError("file layer module is not available") + msg = "file layer module is not available" + raise RuntimeError(msg) if gsd is None: - raise RuntimeError("gsd module is not available") + msg = "gsd module is not available" + raise RuntimeError(msg) with fl.open(name=str(name), mode='r', @@ -1163,7 +1170,7 @@ def read_log(name, scalar_only=False): else: logged_data_dict[log] = numpy.tile( tmp, - (gsdfileobj.nframes,) + tuple(1 for _ in tmp.shape)) + (gsdfileobj.nframes, *tuple(1 for _ in tmp.shape))) for idx in range(1, gsdfileobj.nframes): for log in logged_data_dict.keys(): diff --git a/gsd/pygsd.py b/gsd/pygsd.py index 2489bec4..481982e4 100644 --- a/gsd/pygsd.py +++ b/gsd/pygsd.py @@ -274,7 +274,8 @@ def chunk_exists(self, frame, name): return None """ if not self.__is_open: - raise ValueError("File is not open") + msg = "File is not open" + raise ValueError(msg) chunk = self._find_chunk(frame, name) return chunk is not None @@ -317,7 +318,8 @@ def read_chunk(self, frame, name): arrays instead. """ if not self.__is_open: - raise ValueError("File is not open") + msg = "File is not open" + raise ValueError(msg) chunk = self._find_chunk(frame, name) @@ -429,7 +431,8 @@ def application(self): def nframes(self): """int: Number of frames in the file.""" if not self.__is_open: - raise ValueError("File is not open") + msg = "File is not open" + raise ValueError(msg) if len(self.__index) == 0: return 0 diff --git a/gsd/test/test_fl.py b/gsd/test/test_fl.py index d538658f..f73fd278 100644 --- a/gsd/test/test_fl.py +++ b/gsd/test/test_fl.py @@ -100,7 +100,7 @@ def test_metadata(tmp_path, open_mode): schema='none', schema_version=[1, 2]) as f: assert f.mode == open_mode.write - for i in range(150): + for _i in range(150): f.write_chunk(name='data', data=data) f.end_frame() @@ -263,7 +263,7 @@ def test_readonly_errors(tmp_path, open_mode): application='test_readonly_errors', schema='none', schema_version=[1, 2]) as f: - for i in range(10): + for _i in range(10): f.write_chunk(name='chunk1', data=data) f.end_frame() @@ -368,7 +368,7 @@ def test_truncate(tmp_path): schema='none', schema_version=[1, 2]) as f: assert f.mode == 'w' - for i in range(10): + for _i in range(10): f.write_chunk(name='data', data=data) f.end_frame() @@ -987,7 +987,7 @@ def test_flush(tmp_path, open_mode, n_flush): # 0 calls to flush tests the implicit flush on close, 2 calls to flush # tests that repeated calls are handled properly. 
- for i in range(n_flush): + for _i in range(n_flush): f.flush() with gsd.fl.open(name=tmp_path / 'test_flush.gsd', diff --git a/scripts/benchmark-hoomd.py b/scripts/benchmark-hoomd.py index d569210c..72155387 100755 --- a/scripts/benchmark-hoomd.py +++ b/scripts/benchmark-hoomd.py @@ -175,8 +175,8 @@ def run_sweep(size, size_str): if True: result = run_benchmarks(32 * 32, size) - print("{0:<7} {1:<6} {2:<9.4g} {3:<12.4g} " - "{4:<11.4g} {5:<13.4g} {6:<11.3g}".format( + print("{:<7} {:<6} {:<9.4g} {:<12.4g} " + "{:<11.4g} {:<13.4g} {:<11.3g}".format( size_str, "32^2", result['open_time'] * 1000, result['write'], result['seq_read'], result['random_read'], result['random_read_time'])) @@ -184,7 +184,7 @@ def run_sweep(size, size_str): result = run_benchmarks(128 * 128, size) - print("{0:<7} {1:<6} {2:<9.4g} {3:<12.4g} {4:<11.4g} {5:<13.4g} {6:<11.3g}" + print("{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}" .format(size_str, "128^2", result['open_time'] * 1000, result['write'], result['seq_read'], result['random_read'], result['random_read_time'])) @@ -192,7 +192,7 @@ def run_sweep(size, size_str): result = run_benchmarks(1024 * 1024, size) - print("{0:<7} {1:<6} {2:<9.4g} {3:<12.4g} {4:<11.4g} {5:<13.4g} {6:<11.3g}" + print("{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}" .format(size_str, "1024^2", result['open_time'] * 1000, result['write'], result['seq_read'], result['random_read'], result['random_read_time'])) From 90688a3b5916e3fd7b21abbf4490f8baaa886bf0 Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 16:44:33 -0500 Subject: [PATCH 4/9] Pass ruff check. --- .ruff.toml | 9 +++ gsd/hoomd.py | 108 ++++++++++++++--------------- gsd/pygsd.py | 11 +-- gsd/pytest_plugin_validate.py | 2 +- gsd/test/test_fl.py | 123 ++++++++++++++++++---------------- setup.py | 2 + 6 files changed, 133 insertions(+), 122 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index b99500d1..bfab8bbb 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -24,6 +24,8 @@ ignore = [ "D107", # Do not document __init__ separately from the class. "PLR09", # Allow "too many" statements/arguments/etc... "N816", # Allow mixed case names like kT. + "PT011", # PT011 insists that specific pytest.raises checks should impossibly more specific + "RUF012", # gsd does not use typing hints ] [lint.per-file-ignores] @@ -31,6 +33,13 @@ ignore = [ "__init__.py" = ["F401", # __init__.py import submodules for use by the package importer. ] +"gsd/test/*.py" = ["PLR2004", # unit test value comparisons are not magic values +] + +"doc/conf.py" = ["A001", # Allow copyright variable name + "D", # conf.py does not need documentation + ] + [pydocstyle] convention = "google" diff --git a/gsd/hoomd.py b/gsd/hoomd.py index 00ca60d7..af33f685 100644 --- a/gsd/hoomd.py +++ b/gsd/hoomd.py @@ -28,14 +28,15 @@ import numpy try: - from gsd import fl + import gsd except ImportError: - fl = None + gsd = None +fl_imported = True try: - import gsd + import gsd.fl except ImportError: - gsd = None + fl_imported = False logger = logging.getLogger('gsd.hoomd') @@ -69,10 +70,9 @@ def __init__(self): @property def box(self): - """((6, 1) `numpy.ndarray` of ``numpy.float32``): Box dimensions \ - (:chunk:`configuration/box`). + """((6, 1) `numpy.ndarray` of ``numpy.float32``): Box dimensions. - [lx, ly, lz, xy, xz, yz]. + [lx, ly, lz, xy, xz, yz]. See :chunk:`configuration/box`. 
""" return self._box @@ -674,8 +674,8 @@ def __len__(self): def __getitem__(self, key): if isinstance(key, slice): return type(self)(self._trajectory, self._indices[key]) - else: - return self._trajectory[self._indices[key]] + + return self._trajectory[self._indices[key]] class HOOMDTrajectory: @@ -874,35 +874,32 @@ def _read_frame(self, idx): step_arr = self.file.read_chunk(frame=idx, name='configuration/step') frame.configuration.step = step_arr[0] + elif self._initial_frame is not None: + frame.configuration.step = \ + self._initial_frame.configuration.step else: - if self._initial_frame is not None: - frame.configuration.step = \ - self._initial_frame.configuration.step - else: - frame.configuration.step = \ - frame.configuration._default_value['step'] + frame.configuration.step = \ + frame.configuration._default_value['step'] if self.file.chunk_exists(frame=idx, name='configuration/dimensions'): dimensions_arr = self.file.read_chunk( frame=idx, name='configuration/dimensions') frame.configuration.dimensions = dimensions_arr[0] + elif self._initial_frame is not None: + frame.configuration.dimensions = \ + self._initial_frame.configuration.dimensions else: - if self._initial_frame is not None: - frame.configuration.dimensions = \ - self._initial_frame.configuration.dimensions - else: - frame.configuration.dimensions = \ - frame.configuration._default_value['dimensions'] + frame.configuration.dimensions = \ + frame.configuration._default_value['dimensions'] if self.file.chunk_exists(frame=idx, name='configuration/box'): frame.configuration.box = self.file.read_chunk( frame=idx, name='configuration/box') + elif self._initial_frame is not None: + frame.configuration.box = self._initial_frame.configuration.box else: - if self._initial_frame is not None: - frame.configuration.box = self._initial_frame.configuration.box - else: - frame.configuration.box = \ - frame.configuration._default_value['box'] + frame.configuration.box = \ + frame.configuration._default_value['box'] # then read all groups that have N, types, etc... 
for path in [ @@ -922,9 +919,8 @@ def _read_frame(self, idx): if self.file.chunk_exists(frame=idx, name=path + '/N'): N_arr = self.file.read_chunk(frame=idx, name=path + '/N') container.N = N_arr[0] - else: - if self._initial_frame is not None: - container.N = initial_frame_container.N + elif self._initial_frame is not None: + container.N = initial_frame_container.N # type names if 'types' in container._default_value: @@ -933,11 +929,10 @@ def _read_frame(self, idx): tmp = tmp.view(dtype=numpy.dtype((bytes, tmp.shape[1]))) tmp = tmp.reshape([tmp.shape[0]]) container.types = list(a.decode('UTF-8') for a in tmp) + elif self._initial_frame is not None: + container.types = initial_frame_container.types else: - if self._initial_frame is not None: - container.types = initial_frame_container.types - else: - container.types = container._default_value['types'] + container.types = container._default_value['types'] # type shapes if ('type_shapes' in container._default_value @@ -951,13 +946,12 @@ def _read_frame(self, idx): container.type_shapes = \ list(json.loads(json_string.decode('UTF-8')) for json_string in tmp) + elif self._initial_frame is not None: + container.type_shapes = \ + initial_frame_container.type_shapes else: - if self._initial_frame is not None: - container.type_shapes = \ - initial_frame_container.type_shapes - else: - container.type_shapes = \ - container._default_value['type_shapes'] + container.type_shapes = \ + container._default_value['type_shapes'] for name in container._default_value: if name in ('N', 'types', 'type_shapes'): @@ -995,9 +989,8 @@ def _read_frame(self, idx): for log in logged_data_names: if self.file.chunk_exists(frame=idx, name=log): frame.log[log[4:]] = self.file.read_chunk(frame=idx, name=log) - else: - if self._initial_frame is not None: - frame.log[log[4:]] = self._initial_frame.log[log[4:]] + elif self._initial_frame is not None: + frame.log[log[4:]] = self._initial_frame.log[log[4:]] # store initial frame if self._initial_frame is None and idx == 0: @@ -1018,14 +1011,15 @@ def __getitem__(self, key): """ if isinstance(key, slice): return _HOOMDTrajectoryView(self, range(*key.indices(len(self)))) - elif isinstance(key, int): + + if isinstance(key, int): if key < 0: key += len(self) if key >= len(self) or key < 0: raise IndexError() return self._read_frame(key) - else: - raise TypeError + + raise TypeError def __iter__(self): """Iterate over frames in the trajectory.""" @@ -1044,7 +1038,7 @@ def flush(self): self._file.flush() -def open(name, mode='r'): +def open(name, mode='r'): # noqa: A001 - allow shadowing builtin open """Open a hoomd schema GSD file. The return value of `open` can be used as a context manager. 
@@ -1081,14 +1075,14 @@ def open(name, mode='r'): +------------------+---------------------------------------------+ """ - if fl is None: + if not fl_imported: msg = "file layer module is not available" raise RuntimeError(msg) if gsd is None: msg = "gsd module is not available" raise RuntimeError(msg) - gsdfileobj = fl.open(name=str(name), + gsdfileobj = gsd.fl.open(name=str(name), mode=mode, application='gsd.hoomd ' + gsd.version.version, schema='hoomd', @@ -1130,14 +1124,14 @@ def read_log(name, scalar_only=False): scalar_only=True)) df """ - if fl is None: + if not fl_imported: msg = "file layer module is not available" raise RuntimeError(msg) if gsd is None: msg = "gsd module is not available" raise RuntimeError(msg) - with fl.open(name=str(name), + with gsd.fl.open(name=str(name), mode='r', application='gsd.hoomd ' + gsd.version.version, schema='hoomd', @@ -1148,7 +1142,7 @@ def read_log(name, scalar_only=False): logged_data_names.insert(0, 'configuration/step') if len(logged_data_names) == 1: warnings.warn('No logged data in file: ' + str(name), - RuntimeWarning) + RuntimeWarning, stacklevel=2) logged_data_dict = dict() for log in logged_data_names: @@ -1173,13 +1167,13 @@ def read_log(name, scalar_only=False): (gsdfileobj.nframes, *tuple(1 for _ in tmp.shape))) for idx in range(1, gsdfileobj.nframes): - for log in logged_data_dict.keys(): - if not gsdfileobj.chunk_exists(frame=idx, name=log): + for key in logged_data_dict.keys(): + if not gsdfileobj.chunk_exists(frame=idx, name=key): continue - data = gsdfileobj.read_chunk(frame=idx, name=log) - if len(logged_data_dict[log][idx].shape) == 0: - logged_data_dict[log][idx] = data[0] + data = gsdfileobj.read_chunk(frame=idx, name=key) + if len(logged_data_dict[key][idx].shape) == 0: + logged_data_dict[key][idx] = data[0] else: - logged_data_dict[log][idx] = data + logged_data_dict[key][idx] = data return logged_data_dict diff --git a/gsd/pygsd.py b/gsd/pygsd.py index 481982e4..aa2cdd9e 100644 --- a/gsd/pygsd.py +++ b/gsd/pygsd.py @@ -121,7 +121,8 @@ def __init__(self, file): self.__header = gsd_header._make(gsd_header_struct.unpack(header_raw)) # validate the header - if self.__header.magic != 0x65DF65DF65DF65DF: + expected_magic = 0x65DF65DF65DF65DF + if self.__header.magic != expected_magic: raise RuntimeError("Not a GSD file: " + str(self.__file)) if (self.__header.gsd_version < (1 << 16) and self.__header.gsd_version != (0 << 16 | 3)): @@ -349,8 +350,8 @@ def read_chunk(self, frame, name): if chunk.M == 1: return data_npy - else: - return data_npy.reshape([chunk.N, chunk.M]) + + return data_npy.reshape([chunk.N, chunk.M]) def find_matching_chunk_names(self, match): """Find chunk names in the file that start with the string *match*. @@ -436,5 +437,5 @@ def nframes(self): if len(self.__index) == 0: return 0 - else: - return self.__index[-1].frame + 1 + + return self.__index[-1].frame + 1 diff --git a/gsd/pytest_plugin_validate.py b/gsd/pytest_plugin_validate.py index faac8d83..d3c5c92e 100644 --- a/gsd/pytest_plugin_validate.py +++ b/gsd/pytest_plugin_validate.py @@ -20,7 +20,7 @@ def pytest_addoption(parser): @pytest.fixture(autouse=True) -def skip_validate(request): +def _skip_validate(request): """Skip validation tests by default. Pass the command line option --validate to enable these tests. 
diff --git a/gsd/test/test_fl.py b/gsd/test/test_fl.py index f73fd278..3b00b4b3 100644 --- a/gsd/test/test_fl.py +++ b/gsd/test/test_fl.py @@ -205,23 +205,23 @@ def test_chunk_exists(tmp_path, open_mode): read_data = f.read_chunk(frame=2, name='test') assert not f.chunk_exists(frame=1, name='chunk1') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=1, name='chunk1') assert not f.chunk_exists(frame=2, name='abcdefg') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=2, name='abcdefg') assert not f.chunk_exists(frame=0, name='test') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=0, name='test') assert not f.chunk_exists(frame=2, name='chunk1') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=2, name='chunk1') assert not f.chunk_exists(frame=0, name='abcdefg') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=0, name='abcdefg') assert not f.chunk_exists(frame=1, name='test') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=1, name='test') # test again with pygsd @@ -235,23 +235,23 @@ def test_chunk_exists(tmp_path, open_mode): read_data = f.read_chunk(frame=2, name='test') assert not f.chunk_exists(frame=1, name='chunk1') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=1, name='chunk1') assert not f.chunk_exists(frame=2, name='abcdefg') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=2, name='abcdefg') assert not f.chunk_exists(frame=0, name='test') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=0, name='test') assert not f.chunk_exists(frame=2, name='chunk1') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=2, name='chunk1') assert not f.chunk_exists(frame=0, name='abcdefg') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=0, name='abcdefg') assert not f.chunk_exists(frame=1, name='test') - with pytest.raises(Exception): + with pytest.raises(KeyError): read_data = f.read_chunk(frame=1, name='test') # noqa @@ -273,20 +273,20 @@ def test_readonly_errors(tmp_path, open_mode): application='test_readonly_errors', schema='none', schema_version=[1, 2]) as f: - with pytest.raises(Exception): + with pytest.raises(RuntimeError): f.end_frame() - with pytest.raises(Exception): + with pytest.raises(RuntimeError): f.write_chunk(name='chunk1', data=data) # test again with pygsd with gsd.pygsd.GSDFile( file=open(str(tmp_path / 'test_readonly_errors.gsd'), mode='rb')) as f: - with pytest.raises(Exception): + with pytest.raises(NotImplementedError): f.end_frame() - with pytest.raises(Exception): + with pytest.raises(NotImplementedError): f.write_chunk(name='chunk1', data=data) @@ -294,8 +294,9 @@ def test_fileio_errors(tmp_path, open_mode): """Test that OS file I/O errors pass through.""" # These test cause python to crash on windows.... 
if platform.system() != "Windows": - with pytest.raises(Exception): + with pytest.raises(FileNotFoundError): gsd.fl.open(name='/this/file/does/not/exist', + mode='r', application='test_readonly_errors', schema='none', schema_version=[1, 2]) @@ -313,49 +314,50 @@ def test_fileio_errors(tmp_path, open_mode): def test_dtype_errors(tmp_path, open_mode): """Test that unsupported data types result in errors.""" - with pytest.raises(Exception): - data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.bool_) + data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.bool_) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors1.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open(name=tmp_path / 'test_dtype_errors1.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2]) as f: + with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) - f.end_frame() + f.end_frame() - with pytest.raises(Exception): - data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.float16) + data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.float16) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors2.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open(name=tmp_path / 'test_dtype_errors2.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2]) as f: + with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) - f.end_frame() + f.end_frame() - with pytest.raises(Exception): - data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex64) + data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex64) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors3.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open(name=tmp_path / 'test_dtype_errors3.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2]) as f: + with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) - f.end_frame() + f.end_frame() - with pytest.raises(Exception): - data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex128) + data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex128) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors4.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open(name=tmp_path / 'test_dtype_errors4.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2]) as f: + with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) - f.end_frame() + + f.end_frame() def test_truncate(tmp_path): @@ -580,7 +582,7 @@ def test_many_names(tmp_path, open_mode): application='test_many_names', schema='none', schema_version=[1, 2]) as f: - for frame in range(5): + for _ in range(5): random.shuffle(values) for value in values: f.write_chunk(name=str(value), @@ -716,10 +718,11 @@ def test_gsd_v1_write(tmp_path, open_mode): values_str = [] for v in values: - if type(v) is str and len(v) > 63: + check_v = v + if isinstance(v, str) and len(v) > 63: # v1 files truncate names to 63 chars - v = v[0:63] - values_str.append(str(v)) + check_v = v[0:63] + values_str.append(str(check_v)) values_str.sort() shutil.copy(test_path / 'test_gsd_v1.gsd', tmp_path / 'test_gsd_v1.gsd') @@ 
-737,15 +740,17 @@ def check_v1_file_read(f): frame = 5 random.shuffle(values) for value in values: - if type(value) is int: + check_value = value + + if isinstance(value, int): data = numpy.array([value * 13], dtype=numpy.int32) else: data = numpy.array([hash(value)], dtype=numpy.int64) # v1 files truncate names to 63 chars if len(value) > 63: - value = value[0:63] + check_value = value[0:63] - data_read = f.read_chunk(frame=frame, name=str(value)) + data_read = f.read_chunk(frame=frame, name=str(check_value)) numpy.testing.assert_array_equal(data, data_read) # test that we can write new entries to the file @@ -758,7 +763,7 @@ def check_v1_file_read(f): assert f.gsd_version == (1, 0) for value in values: - if type(value) is int: + if isinstance(value, int): data = numpy.array([value * 13], dtype=numpy.int32) else: data = numpy.array([hash(value)], dtype=numpy.int64) @@ -809,7 +814,7 @@ def check_v1_file_read(f): frame = 5 random.shuffle(values) for value in values: - if type(value) is int: + if isinstance(value, int): data = numpy.array([value * 13], dtype=numpy.int32) else: data = numpy.array([hash(value)], dtype=numpy.int64) @@ -831,7 +836,7 @@ def check_v1_file_read(f): assert f.gsd_version == (2, 0) for value in values: - if type(value) is int: + if isinstance(value, int): data = numpy.array([value * 13], dtype=numpy.int32) else: data = numpy.array([hash(value)], dtype=numpy.int64) diff --git a/setup.py b/setup.py index 417788c9..95a2435c 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,8 @@ # Copyright (c) 2016-2023 The Regents of the University of Michigan # Part of GSD, released under the BSD 2-Clause License. +"""Install gsd.""" + import numpy from Cython.Build import cythonize from setuptools import setup From cd6e83d0f04a48ea9b40bc21abbfeccc7cb3b241 Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 16:46:57 -0500 Subject: [PATCH 5/9] Run ruff format. 
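ruff format takes over from yapf as the project formatter. It follows black-style line wrapping, and with the quote-style = "single" option from the .ruff.toml added in the first patch it also rewrites double-quoted strings to single quotes, which accounts for most of the churn in the hunks below. A minimal sketch of the effect; the dictionary is a made-up example rather than code from this diff:

    # Before formatting:
    options = {"color": "blue",
               "width": 80}

    # After `ruff format` with quote-style = "single": the dictionary fits on
    # one line, and string literals use single quotes.
    options = {'color': 'blue', 'width': 80}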
--- doc/conf.py | 52 +-- gsd/__main__.py | 119 +++--- gsd/conftest.py | 2 +- gsd/hoomd.py | 463 ++++++++++----------- gsd/pygsd.py | 77 ++-- gsd/pytest_plugin_validate.py | 11 +- gsd/test/test_fl.py | 750 +++++++++++++++++++--------------- gsd/test/test_hoomd.py | 498 ++++++++++++---------- gsd/test/test_largefile.py | 28 +- gsd/version.py | 2 +- scripts/benchmark-hoomd.py | 80 ++-- setup.py | 17 +- 12 files changed, 1154 insertions(+), 945 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 2e22a864..432bbe62 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -16,14 +16,15 @@ 'sphinx.ext.intersphinx', 'sphinx.ext.mathjax', 'IPython.sphinxext.ipython_console_highlighting', - 'IPython.sphinxext.ipython_directive' + 'IPython.sphinxext.ipython_directive', ] napoleon_include_special_with_doc = True -intersphinx_mapping = {'python': ('https://docs.python.org/3', None), - 'numpy': ('https://numpy.org/doc/stable', None), - 'hoomd': ('https://hoomd-blue.readthedocs.io/en/latest/', None), +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), + 'numpy': ('https://numpy.org/doc/stable', None), + 'hoomd': ('https://hoomd-blue.readthedocs.io/en/latest/', None), } autodoc_docstring_signature = True @@ -44,38 +45,42 @@ default_role = 'any' -pygments_style = "friendly" -pygments_dark_style = "native" +pygments_style = 'friendly' +pygments_dark_style = 'native' html_theme = 'furo' html_static_path = ['_static'] html_theme_options = { - "dark_css_variables": { - "color-brand-primary": "#5187b2", - "color-brand-content": "#5187b2", + 'dark_css_variables': { + 'color-brand-primary': '#5187b2', + 'color-brand-content': '#5187b2', }, - "light_css_variables": { - "color-brand-primary": "#406a8c", - "color-brand-content": "#406a8c", + 'light_css_variables': { + 'color-brand-primary': '#406a8c', + 'color-brand-content': '#406a8c', }, } + ### Add custom directives def setup(app): - app.add_object_type('chunk', 'chunk', - objname='Data chunk', - indextemplate='single: %s (data chunk)') + app.add_object_type( + 'chunk', 'chunk', objname='Data chunk', indextemplate='single: %s (data chunk)' + ) + tmpdir = tempfile.TemporaryDirectory() ###### IPython directive settings ipython_mplbackend = '' -ipython_execlines = ['import gsd.fl', - 'import gsd.hoomd', - 'import gsd.pygsd', - 'import numpy', - 'import os', - f'os.chdir("{tmpdir.name}")'] +ipython_execlines = [ + 'import gsd.fl', + 'import gsd.hoomd', + 'import gsd.pygsd', + 'import numpy', + 'import os', + f'os.chdir("{tmpdir.name}")', +] dirname = os.path.abspath(os.path.dirname(__file__)) @@ -83,11 +88,10 @@ def setup(app): breathe_default_project = 'gsd' breathe_domain_by_extension = { - "h" : "c", + 'h': 'c', } read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True' if read_the_docs_build: - - subprocess.call('cd ..; doxygen', shell=True) + subprocess.call('cd ..; doxygen', shell=True) diff --git a/gsd/__main__.py b/gsd/__main__.py index e78d01e7..9545af72 100644 --- a/gsd/__main__.py +++ b/gsd/__main__.py @@ -61,28 +61,35 @@ def main_read(args): if args.schema == 'hoomd': traj = hoomd_open(args.file, mode=args.mode) handle = traj.file - local_ns.update({ - 'handle': handle, - 'traj': traj, - }) - attributes.update({"Number of frames": len(traj)}) + local_ns.update( + { + 'handle': handle, + 'traj': traj, + } + ) + attributes.update({'Number of frames': len(traj)}) else: if args.mode not in ['rb', 'rb+', 'ab', 'a', 'r', 'r+']: - msg = "Unsupported schema for creating a file." + msg = 'Unsupported schema for creating a file.' 
raise ValueError(msg) handle = fl.open(args.file, args.mode) - local_ns.update({ - 'handle': handle, - }) + local_ns.update( + { + 'handle': handle, + } + ) - extras = "\n".join( - f"{key}: {val}" for key, val in attributes.items()) + extras = '\n'.join(f'{key}: {val}' for key, val in attributes.items()) - code.interact(local=local_ns, - banner=SHELL_BANNER.format(python_version=sys.version, - gsd_version=version.version, - fn=args.file, - extras=extras + "\n")) + code.interact( + local=local_ns, + banner=SHELL_BANNER.format( + python_version=sys.version, + gsd_version=version.version, + fn=args.file, + extras=extras + '\n', + ), + ) def main(): @@ -95,46 +102,48 @@ def main(): * read """ parser = argparse.ArgumentParser( - description="The gsd package encodes canonical readers and writers " - "for the gsd file format.") - parser.add_argument('--version', - action='store_true', - help="Display the version number and exit.") - parser.add_argument('--debug', - action='store_true', - help="Show traceback on error for debugging.") + description='The gsd package encodes canonical readers and writers ' + 'for the gsd file format.' + ) + parser.add_argument( + '--version', action='store_true', help='Display the version number and exit.' + ) + parser.add_argument( + '--debug', action='store_true', help='Show traceback on error for debugging.' + ) subparsers = parser.add_subparsers() parser_read = subparsers.add_parser('read') - parser_read.add_argument('file', - type=str, - nargs='?', - help="GSD file to read.") - parser_read.add_argument('-s', - '--schema', - type=str, - default='hoomd', - choices=['hoomd', 'none'], - help="The file schema.") - parser_read.add_argument('-m', - '--mode', - type=str, - default='r', - choices=[ - 'rb', - 'rb+', - 'wb', - 'wb+', - 'xb', - 'xb+', - 'ab', - 'w', - 'r', - 'r+', - 'x', - 'a', - ], - help="The file mode.") + parser_read.add_argument('file', type=str, nargs='?', help='GSD file to read.') + parser_read.add_argument( + '-s', + '--schema', + type=str, + default='hoomd', + choices=['hoomd', 'none'], + help='The file schema.', + ) + parser_read.add_argument( + '-m', + '--mode', + type=str, + default='r', + choices=[ + 'rb', + 'rb+', + 'wb', + 'wb+', + 'xb', + 'xb+', + 'ab', + 'w', + 'r', + 'r+', + 'x', + 'a', + ], + help='The file mode.', + ) parser_read.set_defaults(func=main_read) # This is a hack, as argparse itself does not @@ -153,12 +162,12 @@ def main(): args.func(args) except KeyboardInterrupt: _print_err() - _print_err("Interrupted.") + _print_err('Interrupted.') if args.debug: raise sys.exit(1) except RuntimeWarning as warning: - _print_err(f"Warning: {warning}") + _print_err(f'Warning: {warning}') if args.debug: raise sys.exit(1) diff --git a/gsd/conftest.py b/gsd/conftest.py index c32382f8..31bc1c05 100644 --- a/gsd/conftest.py +++ b/gsd/conftest.py @@ -3,4 +3,4 @@ """Global pytest options.""" -pytest_plugins = ("gsd.pytest_plugin_validate",) +pytest_plugins = ('gsd.pytest_plugin_validate',) diff --git a/gsd/hoomd.py b/gsd/hoomd.py index af33f685..83b78cc9 100644 --- a/gsd/hoomd.py +++ b/gsd/hoomd.py @@ -170,11 +170,9 @@ class ParticleData: _default_value['charge'] = numpy.float32(0) _default_value['diameter'] = numpy.float32(1.0) _default_value['body'] = numpy.int32(-1) - _default_value['moment_inertia'] = numpy.array([0, 0, 0], - dtype=numpy.float32) + _default_value['moment_inertia'] = numpy.array([0, 0, 0], dtype=numpy.float32) _default_value['position'] = numpy.array([0, 0, 0], dtype=numpy.float32) - _default_value['orientation'] = 
numpy.array([1, 0, 0, 0], - dtype=numpy.float32) + _default_value['orientation'] = numpy.array([1, 0, 0, 0], dtype=numpy.float32) _default_value['velocity'] = numpy.array([0, 0, 0], dtype=numpy.float32) _default_value['angmom'] = numpy.array([0, 0, 0, 0], dtype=numpy.float32) _default_value['image'] = numpy.array([0, 0, 0], dtype=numpy.int32) @@ -211,50 +209,45 @@ def validate(self): logger.debug('Validating ParticleData') if self.position is not None: - self.position = numpy.ascontiguousarray(self.position, - dtype=numpy.float32) + self.position = numpy.ascontiguousarray(self.position, dtype=numpy.float32) self.position = self.position.reshape([self.N, 3]) if self.orientation is not None: - self.orientation = numpy.ascontiguousarray(self.orientation, - dtype=numpy.float32) + self.orientation = numpy.ascontiguousarray( + self.orientation, dtype=numpy.float32 + ) self.orientation = self.orientation.reshape([self.N, 4]) if self.typeid is not None: - self.typeid = numpy.ascontiguousarray(self.typeid, - dtype=numpy.uint32) + self.typeid = numpy.ascontiguousarray(self.typeid, dtype=numpy.uint32) self.typeid = self.typeid.reshape([self.N]) if self.mass is not None: self.mass = numpy.ascontiguousarray(self.mass, dtype=numpy.float32) self.mass = self.mass.reshape([self.N]) if self.charge is not None: - self.charge = numpy.ascontiguousarray(self.charge, - dtype=numpy.float32) + self.charge = numpy.ascontiguousarray(self.charge, dtype=numpy.float32) self.charge = self.charge.reshape([self.N]) if self.diameter is not None: - self.diameter = numpy.ascontiguousarray(self.diameter, - dtype=numpy.float32) + self.diameter = numpy.ascontiguousarray(self.diameter, dtype=numpy.float32) self.diameter = self.diameter.reshape([self.N]) if self.body is not None: self.body = numpy.ascontiguousarray(self.body, dtype=numpy.int32) self.body = self.body.reshape([self.N]) if self.moment_inertia is not None: - self.moment_inertia = numpy.ascontiguousarray(self.moment_inertia, - dtype=numpy.float32) + self.moment_inertia = numpy.ascontiguousarray( + self.moment_inertia, dtype=numpy.float32 + ) self.moment_inertia = self.moment_inertia.reshape([self.N, 3]) if self.velocity is not None: - self.velocity = numpy.ascontiguousarray(self.velocity, - dtype=numpy.float32) + self.velocity = numpy.ascontiguousarray(self.velocity, dtype=numpy.float32) self.velocity = self.velocity.reshape([self.N, 3]) if self.angmom is not None: - self.angmom = numpy.ascontiguousarray(self.angmom, - dtype=numpy.float32) + self.angmom = numpy.ascontiguousarray(self.angmom, dtype=numpy.float32) self.angmom = self.angmom.reshape([self.N, 4]) if self.image is not None: self.image = numpy.ascontiguousarray(self.image, dtype=numpy.int32) self.image = self.image.reshape([self.N, 3]) - if (self.types is not None - and (not len(set(self.types)) == len(self.types))): - msg = "Type names must be unique." + if self.types is not None and (not len(set(self.types)) == len(self.types)): + msg = 'Type names must be unique.' 
raise ValueError(msg) @@ -337,16 +330,14 @@ def validate(self): logger.debug('Validating BondData') if self.typeid is not None: - self.typeid = numpy.ascontiguousarray(self.typeid, - dtype=numpy.uint32) + self.typeid = numpy.ascontiguousarray(self.typeid, dtype=numpy.uint32) self.typeid = self.typeid.reshape([self.N]) if self.group is not None: self.group = numpy.ascontiguousarray(self.group, dtype=numpy.int32) self.group = self.group.reshape([self.N, self.M]) - if (self.types is not None - and (not len(set(self.types)) == len(self.types))): - msg = "Type names must be unique." + if self.types is not None and (not len(set(self.types)) == len(self.types)): + msg = 'Type names must be unique.' raise ValueError(msg) @@ -383,8 +374,7 @@ def __init__(self): self._default_value = OrderedDict() self._default_value['N'] = numpy.uint32(0) self._default_value['value'] = numpy.float32(0) - self._default_value['group'] = numpy.array([0] * self.M, - dtype=numpy.int32) + self._default_value['group'] = numpy.array([0] * self.M, dtype=numpy.int32) def validate(self): """Validate all attributes. @@ -401,8 +391,7 @@ def validate(self): logger.debug('Validating ConstraintData') if self.value is not None: - self.value = numpy.ascontiguousarray(self.value, - dtype=numpy.float32) + self.value = numpy.ascontiguousarray(self.value, dtype=numpy.float32) self.value = self.value.reshape([self.N]) if self.group is not None: self.group = numpy.ascontiguousarray(self.group, dtype=numpy.int32) @@ -489,145 +478,154 @@ def validate(self): NT = 1 if 'hpmc/integrate/d' in self.state: - self.state['hpmc/integrate/d'] = \ - numpy.ascontiguousarray(self.state['hpmc/integrate/d'], - dtype=numpy.float64) - self.state['hpmc/integrate/d'] = \ - self.state['hpmc/integrate/d'].reshape([1]) + self.state['hpmc/integrate/d'] = numpy.ascontiguousarray( + self.state['hpmc/integrate/d'], dtype=numpy.float64 + ) + self.state['hpmc/integrate/d'] = self.state['hpmc/integrate/d'].reshape([1]) if 'hpmc/integrate/a' in self.state: - self.state['hpmc/integrate/a'] = \ - numpy.ascontiguousarray(self.state['hpmc/integrate/a'], - dtype=numpy.float64) - self.state['hpmc/integrate/a'] = \ - self.state['hpmc/integrate/a'].reshape([1]) + self.state['hpmc/integrate/a'] = numpy.ascontiguousarray( + self.state['hpmc/integrate/a'], dtype=numpy.float64 + ) + self.state['hpmc/integrate/a'] = self.state['hpmc/integrate/a'].reshape([1]) if 'hpmc/sphere/radius' in self.state: - self.state['hpmc/sphere/radius'] = \ - numpy.ascontiguousarray(self.state['hpmc/sphere/radius'], - dtype=numpy.float32) - self.state['hpmc/sphere/radius'] = \ - self.state['hpmc/sphere/radius'].reshape([NT]) + self.state['hpmc/sphere/radius'] = numpy.ascontiguousarray( + self.state['hpmc/sphere/radius'], dtype=numpy.float32 + ) + self.state['hpmc/sphere/radius'] = self.state['hpmc/sphere/radius'].reshape( + [NT] + ) if 'hpmc/sphere/orientable' in self.state: - self.state['hpmc/sphere/orientable'] = \ - numpy.ascontiguousarray(self.state['hpmc/sphere/orientable'], - dtype=numpy.uint8) - self.state['hpmc/sphere/orientable'] = \ - self.state['hpmc/sphere/orientable'].reshape([NT]) + self.state['hpmc/sphere/orientable'] = numpy.ascontiguousarray( + self.state['hpmc/sphere/orientable'], dtype=numpy.uint8 + ) + self.state['hpmc/sphere/orientable'] = self.state[ + 'hpmc/sphere/orientable' + ].reshape([NT]) if 'hpmc/ellipsoid/a' in self.state: - self.state['hpmc/ellipsoid/a'] = \ - numpy.ascontiguousarray(self.state['hpmc/ellipsoid/a'], - dtype=numpy.float32) - self.state['hpmc/ellipsoid/a'] = \ 
- self.state['hpmc/ellipsoid/a'].reshape([NT]) - self.state['hpmc/ellipsoid/b'] = \ - numpy.ascontiguousarray(self.state['hpmc/ellipsoid/b'], - dtype=numpy.float32) - self.state['hpmc/ellipsoid/b'] = \ - self.state['hpmc/ellipsoid/b'].reshape([NT]) - self.state['hpmc/ellipsoid/c'] = \ - numpy.ascontiguousarray(self.state['hpmc/ellipsoid/c'], - dtype=numpy.float32) - self.state['hpmc/ellipsoid/c'] = \ - self.state['hpmc/ellipsoid/c'].reshape([NT]) + self.state['hpmc/ellipsoid/a'] = numpy.ascontiguousarray( + self.state['hpmc/ellipsoid/a'], dtype=numpy.float32 + ) + self.state['hpmc/ellipsoid/a'] = self.state['hpmc/ellipsoid/a'].reshape( + [NT] + ) + self.state['hpmc/ellipsoid/b'] = numpy.ascontiguousarray( + self.state['hpmc/ellipsoid/b'], dtype=numpy.float32 + ) + self.state['hpmc/ellipsoid/b'] = self.state['hpmc/ellipsoid/b'].reshape( + [NT] + ) + self.state['hpmc/ellipsoid/c'] = numpy.ascontiguousarray( + self.state['hpmc/ellipsoid/c'], dtype=numpy.float32 + ) + self.state['hpmc/ellipsoid/c'] = self.state['hpmc/ellipsoid/c'].reshape( + [NT] + ) if 'hpmc/convex_polyhedron/N' in self.state: - self.state['hpmc/convex_polyhedron/N'] = \ - numpy.ascontiguousarray(self.state['hpmc/convex_polyhedron/N'], - dtype=numpy.uint32) - self.state['hpmc/convex_polyhedron/N'] = \ - self.state['hpmc/convex_polyhedron/N'].reshape([NT]) + self.state['hpmc/convex_polyhedron/N'] = numpy.ascontiguousarray( + self.state['hpmc/convex_polyhedron/N'], dtype=numpy.uint32 + ) + self.state['hpmc/convex_polyhedron/N'] = self.state[ + 'hpmc/convex_polyhedron/N' + ].reshape([NT]) sumN = numpy.sum(self.state['hpmc/convex_polyhedron/N']) - self.state['hpmc/convex_polyhedron/vertices'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_polyhedron/vertices'], - dtype=numpy.float32) - self.state['hpmc/convex_polyhedron/vertices'] = \ - self.state['hpmc/convex_polyhedron/vertices'].reshape([sumN, 3]) + self.state['hpmc/convex_polyhedron/vertices'] = numpy.ascontiguousarray( + self.state['hpmc/convex_polyhedron/vertices'], dtype=numpy.float32 + ) + self.state['hpmc/convex_polyhedron/vertices'] = self.state[ + 'hpmc/convex_polyhedron/vertices' + ].reshape([sumN, 3]) if 'hpmc/convex_spheropolyhedron/N' in self.state: - self.state['hpmc/convex_spheropolyhedron/N'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_spheropolyhedron/N'], - dtype=numpy.uint32) - self.state['hpmc/convex_spheropolyhedron/N'] = \ - self.state['hpmc/convex_spheropolyhedron/N'].reshape([NT]) + self.state['hpmc/convex_spheropolyhedron/N'] = numpy.ascontiguousarray( + self.state['hpmc/convex_spheropolyhedron/N'], dtype=numpy.uint32 + ) + self.state['hpmc/convex_spheropolyhedron/N'] = self.state[ + 'hpmc/convex_spheropolyhedron/N' + ].reshape([NT]) sumN = numpy.sum(self.state['hpmc/convex_spheropolyhedron/N']) - self.state['hpmc/convex_spheropolyhedron/sweep_radius'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_spheropolyhedron/sweep_radius'], - dtype=numpy.float32) - self.state['hpmc/convex_spheropolyhedron/sweep_radius'] = \ - self.state[ - 'hpmc/convex_spheropolyhedron/sweep_radius'].reshape([NT]) - - self.state['hpmc/convex_spheropolyhedron/vertices'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_spheropolyhedron/vertices'], - dtype=numpy.float32) - self.state['hpmc/convex_spheropolyhedron/vertices'] = \ - self.state[ - 'hpmc/convex_spheropolyhedron/vertices'].reshape([sumN, 3]) + self.state[ + 'hpmc/convex_spheropolyhedron/sweep_radius' + ] = numpy.ascontiguousarray( + 
self.state['hpmc/convex_spheropolyhedron/sweep_radius'], + dtype=numpy.float32, + ) + self.state['hpmc/convex_spheropolyhedron/sweep_radius'] = self.state[ + 'hpmc/convex_spheropolyhedron/sweep_radius' + ].reshape([NT]) + + self.state[ + 'hpmc/convex_spheropolyhedron/vertices' + ] = numpy.ascontiguousarray( + self.state['hpmc/convex_spheropolyhedron/vertices'], dtype=numpy.float32 + ) + self.state['hpmc/convex_spheropolyhedron/vertices'] = self.state[ + 'hpmc/convex_spheropolyhedron/vertices' + ].reshape([sumN, 3]) if 'hpmc/convex_polygon/N' in self.state: - self.state['hpmc/convex_polygon/N'] = \ - numpy.ascontiguousarray(self.state['hpmc/convex_polygon/N'], - dtype=numpy.uint32) - self.state['hpmc/convex_polygon/N'] = \ - self.state['hpmc/convex_polygon/N'].reshape([NT]) + self.state['hpmc/convex_polygon/N'] = numpy.ascontiguousarray( + self.state['hpmc/convex_polygon/N'], dtype=numpy.uint32 + ) + self.state['hpmc/convex_polygon/N'] = self.state[ + 'hpmc/convex_polygon/N' + ].reshape([NT]) sumN = numpy.sum(self.state['hpmc/convex_polygon/N']) - self.state['hpmc/convex_polygon/vertices'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_polygon/vertices'], - dtype=numpy.float32) - self.state['hpmc/convex_polygon/vertices'] = \ - self.state['hpmc/convex_polygon/vertices'].reshape([sumN, 2]) + self.state['hpmc/convex_polygon/vertices'] = numpy.ascontiguousarray( + self.state['hpmc/convex_polygon/vertices'], dtype=numpy.float32 + ) + self.state['hpmc/convex_polygon/vertices'] = self.state[ + 'hpmc/convex_polygon/vertices' + ].reshape([sumN, 2]) if 'hpmc/convex_spheropolygon/N' in self.state: - self.state['hpmc/convex_spheropolygon/N'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_spheropolygon/N'], - dtype=numpy.uint32) - self.state['hpmc/convex_spheropolygon/N'] = \ - self.state['hpmc/convex_spheropolygon/N'].reshape([NT]) + self.state['hpmc/convex_spheropolygon/N'] = numpy.ascontiguousarray( + self.state['hpmc/convex_spheropolygon/N'], dtype=numpy.uint32 + ) + self.state['hpmc/convex_spheropolygon/N'] = self.state[ + 'hpmc/convex_spheropolygon/N' + ].reshape([NT]) sumN = numpy.sum(self.state['hpmc/convex_spheropolygon/N']) - self.state['hpmc/convex_spheropolygon/sweep_radius'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_spheropolygon/sweep_radius'], - dtype=numpy.float32) - self.state['hpmc/convex_spheropolygon/sweep_radius'] = \ - self.state[ - 'hpmc/convex_spheropolygon/sweep_radius'].reshape([NT]) - - self.state['hpmc/convex_spheropolygon/vertices'] = \ - numpy.ascontiguousarray( - self.state['hpmc/convex_spheropolygon/vertices'], - dtype=numpy.float32) - self.state['hpmc/convex_spheropolygon/vertices'] = \ - self.state[ - 'hpmc/convex_spheropolygon/vertices'].reshape([sumN, 2]) + self.state[ + 'hpmc/convex_spheropolygon/sweep_radius' + ] = numpy.ascontiguousarray( + self.state['hpmc/convex_spheropolygon/sweep_radius'], + dtype=numpy.float32, + ) + self.state['hpmc/convex_spheropolygon/sweep_radius'] = self.state[ + 'hpmc/convex_spheropolygon/sweep_radius' + ].reshape([NT]) + + self.state['hpmc/convex_spheropolygon/vertices'] = numpy.ascontiguousarray( + self.state['hpmc/convex_spheropolygon/vertices'], dtype=numpy.float32 + ) + self.state['hpmc/convex_spheropolygon/vertices'] = self.state[ + 'hpmc/convex_spheropolygon/vertices' + ].reshape([sumN, 2]) if 'hpmc/simple_polygon/N' in self.state: - self.state['hpmc/simple_polygon/N'] = \ - numpy.ascontiguousarray(self.state['hpmc/simple_polygon/N'], - dtype=numpy.uint32) - self.state['hpmc/simple_polygon/N'] = 
\ - self.state['hpmc/simple_polygon/N'].reshape([NT]) + self.state['hpmc/simple_polygon/N'] = numpy.ascontiguousarray( + self.state['hpmc/simple_polygon/N'], dtype=numpy.uint32 + ) + self.state['hpmc/simple_polygon/N'] = self.state[ + 'hpmc/simple_polygon/N' + ].reshape([NT]) sumN = numpy.sum(self.state['hpmc/simple_polygon/N']) - self.state['hpmc/simple_polygon/vertices'] = \ - numpy.ascontiguousarray( - self.state['hpmc/simple_polygon/vertices'], - dtype=numpy.float32) - self.state['hpmc/simple_polygon/vertices'] = \ - self.state[ - 'hpmc/simple_polygon/vertices'].reshape([sumN, 2]) + self.state['hpmc/simple_polygon/vertices'] = numpy.ascontiguousarray( + self.state['hpmc/simple_polygon/vertices'], dtype=numpy.float32 + ) + self.state['hpmc/simple_polygon/vertices'] = self.state[ + 'hpmc/simple_polygon/vertices' + ].reshape([sumN, 2]) for k in self.state: if k not in self._valid_state: @@ -698,15 +696,18 @@ def __init__(self, file): logger.info('opening HOOMDTrajectory: ' + str(self.file)) if self.file.schema != 'hoomd': - raise RuntimeError('GSD file is not a hoomd schema file: ' - + str(self.file)) + raise RuntimeError('GSD file is not a hoomd schema file: ' + str(self.file)) valid = False version = self.file.schema_version - if (version < (2, 0) and version >= (1, 0)): + if version < (2, 0) and version >= (1, 0): valid = True if not valid: - raise RuntimeError('Incompatible hoomd schema version ' - + str(version) + ' in: ' + str(self.file)) + raise RuntimeError( + 'Incompatible hoomd schema version ' + + str(version) + + ' in: ' + + str(self.file) + ) logger.info('found ' + str(len(self)) + ' frames') @@ -742,14 +743,14 @@ def append(self, frame): self._read_frame(0) for path in [ - 'configuration', - 'particles', - 'bonds', - 'angles', - 'dihedrals', - 'impropers', - 'constraints', - 'pairs', + 'configuration', + 'particles', + 'bonds', + 'angles', + 'dihedrals', + 'impropers', + 'constraints', + 'pairs', ]: container = getattr(frame, path) for name in container._default_value: @@ -765,9 +766,7 @@ def append(self, frame): data = numpy.array([data], dtype=numpy.uint8) if name in ('types', 'type_shapes'): if name == 'type_shapes': - data = [ - json.dumps(shape_dict) for shape_dict in data - ] + data = [json.dumps(shape_dict) for shape_dict in data] wid = max(len(w) for w in data) + 1 b = numpy.array(data, dtype=numpy.dtype((bytes, wid))) data = b.view(dtype=numpy.int8).reshape(len(b), wid) @@ -816,8 +815,9 @@ def _should_write(self, path, name, frame): initial_container = getattr(self._initial_frame, path) initial_data = getattr(initial_container, name) if numpy.array_equal(initial_data, data): - logger.debug('skipping data chunk, matches frame 0: ' + path - + '/' + name) + logger.debug( + 'skipping data chunk, matches frame 0: ' + path + '/' + name + ) return False matches_default_value = False @@ -825,12 +825,13 @@ def _should_write(self, path, name, frame): matches_default_value = data == container._default_value[name] else: matches_default_value = numpy.array_equiv( - data, container._default_value[name]) + data, container._default_value[name] + ) - if matches_default_value \ - and not self.file.chunk_exists(frame=0, name=path + '/' + name): - logger.debug('skipping data chunk, default value: ' + path + '/' - + name) + if matches_default_value and not self.file.chunk_exists( + frame=0, name=path + '/' + name + ): + logger.debug('skipping data chunk, default value: ' + path + '/' + name) return False return True @@ -871,45 +872,45 @@ def _read_frame(self, idx): frame = Frame() # 
read configuration first if self.file.chunk_exists(frame=idx, name='configuration/step'): - step_arr = self.file.read_chunk(frame=idx, - name='configuration/step') + step_arr = self.file.read_chunk(frame=idx, name='configuration/step') frame.configuration.step = step_arr[0] elif self._initial_frame is not None: - frame.configuration.step = \ - self._initial_frame.configuration.step + frame.configuration.step = self._initial_frame.configuration.step else: - frame.configuration.step = \ - frame.configuration._default_value['step'] + frame.configuration.step = frame.configuration._default_value['step'] if self.file.chunk_exists(frame=idx, name='configuration/dimensions'): dimensions_arr = self.file.read_chunk( - frame=idx, name='configuration/dimensions') + frame=idx, name='configuration/dimensions' + ) frame.configuration.dimensions = dimensions_arr[0] elif self._initial_frame is not None: - frame.configuration.dimensions = \ + frame.configuration.dimensions = ( self._initial_frame.configuration.dimensions + ) else: - frame.configuration.dimensions = \ - frame.configuration._default_value['dimensions'] + frame.configuration.dimensions = frame.configuration._default_value[ + 'dimensions' + ] if self.file.chunk_exists(frame=idx, name='configuration/box'): frame.configuration.box = self.file.read_chunk( - frame=idx, name='configuration/box') + frame=idx, name='configuration/box' + ) elif self._initial_frame is not None: frame.configuration.box = self._initial_frame.configuration.box else: - frame.configuration.box = \ - frame.configuration._default_value['box'] + frame.configuration.box = frame.configuration._default_value['box'] # then read all groups that have N, types, etc... for path in [ - 'particles', - 'bonds', - 'angles', - 'dihedrals', - 'impropers', - 'constraints', - 'pairs', + 'particles', + 'bonds', + 'angles', + 'dihedrals', + 'impropers', + 'constraints', + 'pairs', ]: container = getattr(frame, path) if self._initial_frame is not None: @@ -935,23 +936,18 @@ def _read_frame(self, idx): container.types = container._default_value['types'] # type shapes - if ('type_shapes' in container._default_value - and path == 'particles'): - if self.file.chunk_exists(frame=idx, - name=path + '/type_shapes'): - tmp = self.file.read_chunk(frame=idx, - name=path + '/type_shapes') + if 'type_shapes' in container._default_value and path == 'particles': + if self.file.chunk_exists(frame=idx, name=path + '/type_shapes'): + tmp = self.file.read_chunk(frame=idx, name=path + '/type_shapes') tmp = tmp.view(dtype=numpy.dtype((bytes, tmp.shape[1]))) tmp = tmp.reshape([tmp.shape[0]]) - container.type_shapes = \ - list(json.loads(json_string.decode('UTF-8')) - for json_string in tmp) + container.type_shapes = list( + json.loads(json_string.decode('UTF-8')) for json_string in tmp + ) elif self._initial_frame is not None: - container.type_shapes = \ - initial_frame_container.type_shapes + container.type_shapes = initial_frame_container.type_shapes else: - container.type_shapes = \ - container._default_value['type_shapes'] + container.type_shapes = container._default_value['type_shapes'] for name in container._default_value: if name in ('N', 'types', 'type_shapes'): @@ -960,20 +956,23 @@ def _read_frame(self, idx): # per particle/bond quantities if self.file.chunk_exists(frame=idx, name=path + '/' + name): container.__dict__[name] = self.file.read_chunk( - frame=idx, name=path + '/' + name) + frame=idx, name=path + '/' + name + ) else: - if (self._initial_frame is not None - and initial_frame_container.N == 
container.N): + if ( + self._initial_frame is not None + and initial_frame_container.N == container.N + ): # read default from initial frame - container.__dict__[name] = \ - initial_frame_container.__dict__[name] + container.__dict__[name] = initial_frame_container.__dict__[ + name + ] else: # initialize from default value tmp = numpy.array([container._default_value[name]]) s = list(tmp.shape) s[0] = container.N - container.__dict__[name] = numpy.empty(shape=s, - dtype=tmp.dtype) + container.__dict__[name] = numpy.empty(shape=s, dtype=tmp.dtype) container.__dict__[name][:] = tmp container.__dict__[name].flags.writeable = False @@ -981,8 +980,9 @@ def _read_frame(self, idx): # read state data for state in frame._valid_state: if self.file.chunk_exists(frame=idx, name='state/' + state): - frame.state[state] = self.file.read_chunk(frame=idx, - name='state/' + state) + frame.state[state] = self.file.read_chunk( + frame=idx, name='state/' + state + ) # read log data logged_data_names = self.file.find_matching_chunk_names('log/') @@ -1038,7 +1038,7 @@ def flush(self): self._file.flush() -def open(name, mode='r'): # noqa: A001 - allow shadowing builtin open +def open(name, mode='r'): # noqa: A001 - allow shadowing builtin open """Open a hoomd schema GSD file. The return value of `open` can be used as a context manager. @@ -1076,17 +1076,19 @@ def open(name, mode='r'): # noqa: A001 - allow shadowing builtin open """ if not fl_imported: - msg = "file layer module is not available" + msg = 'file layer module is not available' raise RuntimeError(msg) if gsd is None: - msg = "gsd module is not available" + msg = 'gsd module is not available' raise RuntimeError(msg) - gsdfileobj = gsd.fl.open(name=str(name), - mode=mode, - application='gsd.hoomd ' + gsd.version.version, - schema='hoomd', - schema_version=[1, 4]) + gsdfileobj = gsd.fl.open( + name=str(name), + mode=mode, + application='gsd.hoomd ' + gsd.version.version, + schema='hoomd', + schema_version=[1, 4], + ) return HOOMDTrajectory(gsdfileobj) @@ -1125,24 +1127,26 @@ def read_log(name, scalar_only=False): df """ if not fl_imported: - msg = "file layer module is not available" + msg = 'file layer module is not available' raise RuntimeError(msg) if gsd is None: - msg = "gsd module is not available" + msg = 'gsd module is not available' raise RuntimeError(msg) - with gsd.fl.open(name=str(name), - mode='r', - application='gsd.hoomd ' + gsd.version.version, - schema='hoomd', - schema_version=[1, 4]) as gsdfileobj: - + with gsd.fl.open( + name=str(name), + mode='r', + application='gsd.hoomd ' + gsd.version.version, + schema='hoomd', + schema_version=[1, 4], + ) as gsdfileobj: logged_data_names = gsdfileobj.find_matching_chunk_names('log/') # Always log timestep associated with each log entry logged_data_names.insert(0, 'configuration/step') if len(logged_data_names) == 1: - warnings.warn('No logged data in file: ' + str(name), - RuntimeWarning, stacklevel=2) + warnings.warn( + 'No logged data in file: ' + str(name), RuntimeWarning, stacklevel=2 + ) logged_data_dict = dict() for log in logged_data_names: @@ -1160,11 +1164,12 @@ def read_log(name, scalar_only=False): continue if tmp.shape[0] == 1: logged_data_dict[log] = numpy.full( - fill_value=tmp[0], shape=(gsdfileobj.nframes,)) + fill_value=tmp[0], shape=(gsdfileobj.nframes,) + ) else: logged_data_dict[log] = numpy.tile( - tmp, - (gsdfileobj.nframes, *tuple(1 for _ in tmp.shape))) + tmp, (gsdfileobj.nframes, *tuple(1 for _ in tmp.shape)) + ) for idx in range(1, gsdfileobj.nframes): for key in 
logged_data_dict.keys(): diff --git a/gsd/pygsd.py b/gsd/pygsd.py index aa2cdd9e..a608ebc9 100644 --- a/gsd/pygsd.py +++ b/gsd/pygsd.py @@ -36,7 +36,7 @@ import numpy -version = "3.2.0" +version = '3.2.0' logger = logging.getLogger('gsd.pygsd') @@ -49,8 +49,7 @@ ) gsd_header_struct = struct.Struct('QQQQQII64s64s80s') -gsd_index_entry = namedtuple('gsd_index_entry', - 'frame N location M id type flags') +gsd_index_entry = namedtuple('gsd_index_entry', 'frame N location M id type flags') gsd_index_entry_struct = struct.Struct('QQqIHBB') gsd_type_mapping = { @@ -111,8 +110,7 @@ def __init__(self, file): try: header_raw = self.__file.read(gsd_header_struct.size) except UnicodeDecodeError: - print("\nDid you open the file in binary mode (rb)?\n", - file=sys.stderr) + print('\nDid you open the file in binary mode (rb)?\n', file=sys.stderr) raise if len(header_raw) != gsd_header_struct.size: @@ -123,14 +121,13 @@ def __init__(self, file): # validate the header expected_magic = 0x65DF65DF65DF65DF if self.__header.magic != expected_magic: - raise RuntimeError("Not a GSD file: " + str(self.__file)) - if (self.__header.gsd_version < (1 << 16) - and self.__header.gsd_version != (0 << 16 | 3)): - raise RuntimeError("Unsupported GSD file version: " - + str(self.__file)) + raise RuntimeError('Not a GSD file: ' + str(self.__file)) + if self.__header.gsd_version < (1 << 16) and self.__header.gsd_version != ( + 0 << 16 | 3 + ): + raise RuntimeError('Unsupported GSD file version: ' + str(self.__file)) if self.__header.gsd_version >= (3 << 16): - raise RuntimeError("Unsupported GSD file version: " - + str(self.__file)) + raise RuntimeError('Unsupported GSD file version: ' + str(self.__file)) # determine the file size (only works in Python 3) self.__file.seek(0, 2) @@ -139,8 +136,7 @@ def __init__(self, file): self.__namelist = {} c = 0 self.__file.seek(self.__header.namelist_location, 0) - namelist_raw = self.__file.read(self.__header.namelist_allocated_entries - * 64) + namelist_raw = self.__file.read(self.__header.namelist_allocated_entries * 64) names = namelist_raw.split(b'\x00') @@ -159,18 +155,17 @@ def __init__(self, file): if len(index_entry_raw) != gsd_index_entry_struct.size: raise OSError - idx = gsd_index_entry._make( - gsd_index_entry_struct.unpack(index_entry_raw)) + idx = gsd_index_entry._make(gsd_index_entry_struct.unpack(index_entry_raw)) # 0 location signifies end of index if idx.location == 0: break if not self.__is_entry_valid(idx): - raise RuntimeError("Corrupt GSD file: " + str(self.__file)) + raise RuntimeError('Corrupt GSD file: ' + str(self.__file)) if i > 0 and idx.frame < self.__index[i - 1].frame: - raise RuntimeError("Corrupt GSD file: " + str(self.__file)) + raise RuntimeError('Corrupt GSD file: ' + str(self.__file)) self.__index.append(idx) @@ -236,7 +231,7 @@ def _find_chunk(self, frame, name): R = len(self.__index) # progressively narrow the search window by halves - while (R - L > 1): + while R - L > 1: m = (L + R) // 2 if frame < self.__index[m].frame: @@ -275,7 +270,7 @@ def chunk_exists(self, frame, name): return None """ if not self.__is_open: - msg = "File is not open" + msg = 'File is not open' raise ValueError(msg) chunk = self._find_chunk(frame, name) @@ -319,24 +314,37 @@ def read_chunk(self, frame, name): arrays instead. 
""" if not self.__is_open: - msg = "File is not open" + msg = 'File is not open' raise ValueError(msg) chunk = self._find_chunk(frame, name) if chunk is None: - raise KeyError("frame " + str(frame) + " / chunk " + name - + " not found in: " + str(self.__file)) - - logger.debug('read chunk: ' + str(self.__file) + ' - ' + str(frame) - + ' - ' + name) + raise KeyError( + 'frame ' + + str(frame) + + ' / chunk ' + + name + + ' not found in: ' + + str(self.__file) + ) + + logger.debug( + 'read chunk: ' + str(self.__file) + ' - ' + str(frame) + ' - ' + name + ) size = chunk.N * chunk.M * gsd_type_mapping[chunk.type].itemsize if chunk.location == 0: - raise RuntimeError("Corrupt chunk: " + str(frame) + " / " + name - + " in file" + str(self.__file)) - - if (size == 0): + raise RuntimeError( + 'Corrupt chunk: ' + + str(frame) + + ' / ' + + name + + ' in file' + + str(self.__file) + ) + + if size == 0: return numpy.array([], dtype=gsd_type_mapping[chunk.type]) self.__file.seek(chunk.location, 0) @@ -345,8 +353,7 @@ def read_chunk(self, frame, name): if len(data_raw) != size: raise OSError - data_npy = numpy.frombuffer(data_raw, - dtype=gsd_type_mapping[chunk.type]) + data_npy = numpy.frombuffer(data_raw, dtype=gsd_type_mapping[chunk.type]) if chunk.M == 1: return data_npy @@ -407,7 +414,7 @@ def gsd_version(self): The tuple is in the order (major, minor). """ v = self.__header.gsd_version - return (v >> 16, v & 0xffff) + return (v >> 16, v & 0xFFFF) @property def schema_version(self): @@ -416,7 +423,7 @@ def schema_version(self): The tuple is in the order (major, minor). """ v = self.__header.schema_version - return (v >> 16, v & 0xffff) + return (v >> 16, v & 0xFFFF) @property def schema(self): @@ -432,7 +439,7 @@ def application(self): def nframes(self): """int: Number of frames in the file.""" if not self.__is_open: - msg = "File is not open" + msg = 'File is not open' raise ValueError(msg) if len(self.__index) == 0: diff --git a/gsd/pytest_plugin_validate.py b/gsd/pytest_plugin_validate.py index d3c5c92e..608c8c34 100644 --- a/gsd/pytest_plugin_validate.py +++ b/gsd/pytest_plugin_validate.py @@ -12,10 +12,10 @@ def pytest_addoption(parser): * validate - run validation tests """ parser.addoption( - "--validate", - action="store_true", + '--validate', + action='store_true', default=False, - help="Enable long running validation tests.", + help='Enable long running validation tests.', ) @@ -26,11 +26,12 @@ def _skip_validate(request): Pass the command line option --validate to enable these tests. """ if request.node.get_closest_marker('validate'): - if not request.config.getoption("validate"): + if not request.config.getoption('validate'): pytest.skip('Validation tests not requested.') def pytest_configure(config): """Define the ``validate`` marker.""" config.addinivalue_line( - "markers", "validate: Tests that perform long-running validations.") + 'markers', 'validate: Tests that perform long-running validations.' 
+ ) diff --git a/gsd/test/test_fl.py b/gsd/test/test_fl.py index 3b00b4b3..f3d94294 100644 --- a/gsd/test/test_fl.py +++ b/gsd/test/test_fl.py @@ -21,52 +21,63 @@ def test_create(tmp_path, open_mode): """Test creation of GSD files.""" - gsd.fl.open(mode=open_mode.write, - name=tmp_path / "test_create.gsd", - application="test_create", - schema="none", - schema_version=[1, 2]) - - -@pytest.mark.parametrize('typ', [ - numpy.uint8, - numpy.uint16, - numpy.uint32, - numpy.uint64, - numpy.int8, - numpy.int16, - numpy.int32, - numpy.int64, - numpy.float32, - numpy.float64, -]) + gsd.fl.open( + mode=open_mode.write, + name=tmp_path / 'test_create.gsd', + application='test_create', + schema='none', + schema_version=[1, 2], + ) + + +@pytest.mark.parametrize( + 'typ', + [ + numpy.uint8, + numpy.uint16, + numpy.uint32, + numpy.uint64, + numpy.int8, + numpy.int16, + numpy.int32, + numpy.int64, + numpy.float32, + numpy.float64, + ], +) def test_dtype(tmp_path, typ): """Test all supported data types.""" data1d = numpy.array([1, 2, 3, 4, 5, 127], dtype=typ) data2d = numpy.array([[10, 20], [30, 40], [50, 80]], dtype=typ) data_zero = numpy.array([], dtype=typ) - gsd.fl.open(mode='x', - name=tmp_path / "test_dtype.gsd", - application="test_dtype", - schema="none", - schema_version=[1, 2]) - - with gsd.fl.open(name=tmp_path / "test_dtype.gsd", - mode='w', - application="test_dtype", - schema="none", - schema_version=[1, 2]) as f: + gsd.fl.open( + mode='x', + name=tmp_path / 'test_dtype.gsd', + application='test_dtype', + schema='none', + schema_version=[1, 2], + ) + + with gsd.fl.open( + name=tmp_path / 'test_dtype.gsd', + mode='w', + application='test_dtype', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='data1d', data=data1d) f.write_chunk(name='data2d', data=data2d) f.write_chunk(name='data_zero', data=data_zero) f.end_frame() - with gsd.fl.open(name=tmp_path / "test_dtype.gsd", - mode='r', - application="test_dtype", - schema="none", - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_dtype.gsd', + mode='r', + application='test_dtype', + schema='none', + schema_version=[1, 2], + ) as f: read_data1d = f.read_chunk(frame=0, name='data1d') read_data2d = f.read_chunk(frame=0, name='data2d') read_data_zero = f.read_chunk(frame=0, name='data_zero') @@ -79,8 +90,7 @@ def test_dtype(tmp_path, typ): assert data_zero.shape == (0,) # test again with pygsd - with gsd.pygsd.GSDFile( - file=open(str(tmp_path / "test_dtype.gsd"), mode='rb')) as f: + with gsd.pygsd.GSDFile(file=open(str(tmp_path / 'test_dtype.gsd'), mode='rb')) as f: read_data1d = f.read_chunk(frame=0, name='data1d') read_data2d = f.read_chunk(frame=0, name='data2d') @@ -94,21 +104,25 @@ def test_metadata(tmp_path, open_mode): """Test file metadata.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) - with gsd.fl.open(name=tmp_path / 'test_metadata.gsd', - mode=open_mode.write, - application='test_metadata', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_metadata.gsd', + mode=open_mode.write, + application='test_metadata', + schema='none', + schema_version=[1, 2], + ) as f: assert f.mode == open_mode.write for _i in range(150): f.write_chunk(name='data', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_metadata.gsd', - mode=open_mode.read, - application='test_metadata', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_metadata.gsd', + mode=open_mode.read, + application='test_metadata', + 
schema='none', + schema_version=[1, 2], + ) as f: assert f.name == str(tmp_path / 'test_metadata.gsd') assert f.mode == open_mode.read assert f.application == 'test_metadata' @@ -119,7 +133,8 @@ def test_metadata(tmp_path, open_mode): # test again with pygsd with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_metadata.gsd'), mode='rb')) as f: + file=open(str(tmp_path / 'test_metadata.gsd'), mode='rb') + ) as f: assert f.name == str(tmp_path / 'test_metadata.gsd') assert f.mode == 'r' assert f.application == 'test_metadata' @@ -131,21 +146,25 @@ def test_metadata(tmp_path, open_mode): def test_append(tmp_path, open_mode): """Test that data chunks can be appended to existing files.""" - with gsd.fl.open(name=tmp_path / 'test_append.gsd', - mode=open_mode.write, - application='test_append', - schema='none', - schema_version=[1, 2]): + with gsd.fl.open( + name=tmp_path / 'test_append.gsd', + mode=open_mode.write, + application='test_append', + schema='none', + schema_version=[1, 2], + ): pass data = numpy.array([10], dtype=numpy.int64) nframes = 1024 - with gsd.fl.open(name=tmp_path / 'test_append.gsd', - mode='a', - application='test_append', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_append.gsd', + mode='a', + application='test_append', + schema='none', + schema_version=[1, 2], + ) as f: assert f.mode == 'a' for i in range(nframes): data[0] = i @@ -154,11 +173,13 @@ def test_append(tmp_path, open_mode): f.write_chunk(name='data10', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_append.gsd', - mode=open_mode.read, - application='test_append', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_append.gsd', + mode=open_mode.read, + application='test_append', + schema='none', + schema_version=[1, 2], + ) as f: assert f.nframes == nframes for i in range(nframes): data1 = f.read_chunk(frame=i, name='data1') @@ -168,7 +189,8 @@ def test_append(tmp_path, open_mode): # test again with pygsd with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_append.gsd'), mode='rb')) as f: + file=open(str(tmp_path / 'test_append.gsd'), mode='rb') + ) as f: assert f.nframes == nframes for i in range(nframes): data1 = f.read_chunk(frame=i, name='data1') @@ -180,11 +202,13 @@ def test_append(tmp_path, open_mode): def test_chunk_exists(tmp_path, open_mode): """Test the chunk_exists API.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) - with gsd.fl.open(name=tmp_path / 'test_chunk_exists.gsd', - mode=open_mode.write, - application='test_chunk_exists', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_chunk_exists.gsd', + mode=open_mode.write, + application='test_chunk_exists', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() f.write_chunk(name='abcdefg', data=data) @@ -192,11 +216,13 @@ def test_chunk_exists(tmp_path, open_mode): f.write_chunk(name='test', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_chunk_exists.gsd', - mode=open_mode.read, - application='test_chunk_exists', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_chunk_exists.gsd', + mode=open_mode.read, + application='test_chunk_exists', + schema='none', + schema_version=[1, 2], + ) as f: assert f.chunk_exists(frame=0, name='chunk1') read_data = f.read_chunk(frame=0, name='chunk1') assert f.chunk_exists(frame=1, name='abcdefg') @@ -226,7 +252,8 @@ def 
test_chunk_exists(tmp_path, open_mode): # test again with pygsd with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_chunk_exists.gsd'), mode='rb')) as f: + file=open(str(tmp_path / 'test_chunk_exists.gsd'), mode='rb') + ) as f: assert f.chunk_exists(frame=0, name='chunk1') read_data = f.read_chunk(frame=0, name='chunk1') assert f.chunk_exists(frame=1, name='abcdefg') @@ -258,21 +285,25 @@ def test_chunk_exists(tmp_path, open_mode): def test_readonly_errors(tmp_path, open_mode): """Test that read only files provide the appropriate errors.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) - with gsd.fl.open(name=tmp_path / 'test_readonly_errors.gsd', - mode=open_mode.write, - application='test_readonly_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_readonly_errors.gsd', + mode=open_mode.write, + application='test_readonly_errors', + schema='none', + schema_version=[1, 2], + ) as f: for _i in range(10): f.write_chunk(name='chunk1', data=data) f.end_frame() data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) - with gsd.fl.open(name=tmp_path / 'test_readonly_errors.gsd', - mode='r', - application='test_readonly_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_readonly_errors.gsd', + mode='r', + application='test_readonly_errors', + schema='none', + schema_version=[1, 2], + ) as f: with pytest.raises(RuntimeError): f.end_frame() @@ -281,8 +312,8 @@ def test_readonly_errors(tmp_path, open_mode): # test again with pygsd with gsd.pygsd.GSDFile( - file=open(str(tmp_path - / 'test_readonly_errors.gsd'), mode='rb')) as f: + file=open(str(tmp_path / 'test_readonly_errors.gsd'), mode='rb') + ) as f: with pytest.raises(NotImplementedError): f.end_frame() @@ -293,67 +324,79 @@ def test_readonly_errors(tmp_path, open_mode): def test_fileio_errors(tmp_path, open_mode): """Test that OS file I/O errors pass through.""" # These test cause python to crash on windows.... 
- if platform.system() != "Windows": + if platform.system() != 'Windows': with pytest.raises(FileNotFoundError): - gsd.fl.open(name='/this/file/does/not/exist', - mode='r', - application='test_readonly_errors', - schema='none', - schema_version=[1, 2]) + gsd.fl.open( + name='/this/file/does/not/exist', + mode='r', + application='test_readonly_errors', + schema='none', + schema_version=[1, 2], + ) with open(str(tmp_path / 'test_fileio_errors.gsd'), 'wb') as f: f.write(b'test') with pytest.raises(RuntimeError): - f = gsd.fl.open(name=tmp_path / 'test_fileio_errors.gsd', - mode=open_mode.read, - application='test_readonly_errors', - schema='none', - schema_version=[1, 2]) + f = gsd.fl.open( + name=tmp_path / 'test_fileio_errors.gsd', + mode=open_mode.read, + application='test_readonly_errors', + schema='none', + schema_version=[1, 2], + ) def test_dtype_errors(tmp_path, open_mode): """Test that unsupported data types result in errors.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.bool_) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors1.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_dtype_errors1.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2], + ) as f: with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) f.end_frame() data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.float16) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors2.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_dtype_errors2.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2], + ) as f: with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) f.end_frame() data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex64) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors3.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_dtype_errors3.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2], + ) as f: with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) f.end_frame() data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex128) - with gsd.fl.open(name=tmp_path / 'test_dtype_errors4.gsd', - mode=open_mode.write, - application='test_dtype_errors', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_dtype_errors4.gsd', + mode=open_mode.write, + application='test_dtype_errors', + schema='none', + schema_version=[1, 2], + ) as f: with pytest.raises(ValueError): f.write_chunk(name='chunk1', data=data) @@ -362,13 +405,16 @@ def test_dtype_errors(tmp_path, open_mode): def test_truncate(tmp_path): """Test that the truncate method functions.""" - data = numpy.ascontiguousarray(numpy.random.random(size=(1000, 3)), - dtype=numpy.float32) - with gsd.fl.open(name=tmp_path / 'test_truncate.gsd', - mode='w', - application='test_truncate', - schema='none', - schema_version=[1, 2]) as f: + data = numpy.ascontiguousarray( + numpy.random.random(size=(1000, 3)), dtype=numpy.float32 + ) + with gsd.fl.open( + name=tmp_path / 'test_truncate.gsd', + mode='w', + application='test_truncate', + schema='none', + schema_version=[1, 2], + ) as f: assert 
f.mode == 'w' for _i in range(10): f.write_chunk(name='data', data=data) @@ -385,11 +431,13 @@ def test_truncate(tmp_path): f.write_chunk(name='data', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_truncate.gsd', - mode='r', - application='test_truncate', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_truncate.gsd', + mode='r', + application='test_truncate', + schema='none', + schema_version=[1, 2], + ) as f: assert f.name == str(tmp_path / 'test_truncate.gsd') assert f.mode == 'r' assert f.application == 'test_truncate' @@ -404,11 +452,13 @@ def test_namelen(tmp_path, open_mode): schema_long = 'ijklmnop' * 100 chunk_long = '12345678' * 100 - with gsd.fl.open(name=tmp_path / 'test_namelen.gsd', - mode=open_mode.write, - application=app_long, - schema=schema_long, - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_namelen.gsd', + mode=open_mode.write, + application=app_long, + schema=schema_long, + schema_version=[1, 2], + ) as f: assert f.application == app_long[0:63] assert f.schema == schema_long[0:63] @@ -416,17 +466,20 @@ def test_namelen(tmp_path, open_mode): f.write_chunk(name=chunk_long, data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_namelen.gsd', - mode=open_mode.read, - application=app_long, - schema=schema_long, - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_namelen.gsd', + mode=open_mode.read, + application=app_long, + schema=schema_long, + schema_version=[1, 2], + ) as f: data_read = f.read_chunk(0, name=chunk_long) numpy.testing.assert_array_equal(data, data_read) # test again with pygsd with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_namelen.gsd'), mode='rb')) as f: + file=open(str(tmp_path / 'test_namelen.gsd'), mode='rb') + ) as f: data_read = f.read_chunk(0, name=chunk_long) numpy.testing.assert_array_equal(data, data_read) @@ -435,61 +488,75 @@ def test_open(tmp_path): """Test the open() API.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode='x', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode='x', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_2.gsd', - mode='x', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_2.gsd', + mode='x', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() f.read_chunk(0, name='chunk1') - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode='w', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode='w', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode='w', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode='w', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() f.read_chunk(0, name='chunk1') - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode='a', - application='test_open', - schema='none', - 
schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode='a', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode='r', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode='r', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.read_chunk(0, name='chunk1') f.read_chunk(1, name='chunk1') - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode='r+', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode='r+', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() f.read_chunk(0, name='chunk1') @@ -501,22 +568,26 @@ def test_find_matching_chunk_names(tmp_path, open_mode): """Test the find_matching_chunk_names API.""" data = numpy.array([1, 2, 3, 4, 5], dtype=numpy.float32) - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode=open_mode.write, - application='test_find_matching_chunk_names', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode=open_mode.write, + application='test_find_matching_chunk_names', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='log/A', data=data) f.write_chunk(name='log/chunk2', data=data) f.end_frame() f.write_chunk(name='data/B', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode=open_mode.read, - application='test_find_matching_chunk_names', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode=open_mode.read, + application='test_find_matching_chunk_names', + schema='none', + schema_version=[1, 2], + ) as f: all_chunks = f.find_matching_chunk_names('') assert len(all_chunks) == 3 assert 'log/A' in all_chunks @@ -536,8 +607,7 @@ def test_find_matching_chunk_names(tmp_path, open_mode): assert len(other_chunks) == 0 # test again with pygsd - with gsd.pygsd.GSDFile(file=open(str(tmp_path - / "test.gsd"), mode='rb')) as f: + with gsd.pygsd.GSDFile(file=open(str(tmp_path / 'test.gsd'), mode='rb')) as f: all_chunks = f.find_matching_chunk_names('') assert len(all_chunks) == 3 assert 'log/A' in all_chunks @@ -559,42 +629,47 @@ def test_find_matching_chunk_names(tmp_path, open_mode): def test_chunk_name_limit(tmp_path, open_mode): """Test that providing more than the maximum allowed chunk names errors.""" - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode=open_mode.write, - application='test_chunk_name_limit', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode=open_mode.write, + application='test_chunk_name_limit', + schema='none', + schema_version=[1, 2], + ) as f: for i in range(65535): f.write_chunk(name=str(i), data=numpy.array([i], dtype=numpy.int32)) # The GSD specification limits to 65535 names: with pytest.raises(RuntimeError): - f.write_chunk(name='65536', - data=numpy.array([i], dtype=numpy.int32)) + f.write_chunk(name='65536', data=numpy.array([i], dtype=numpy.int32)) def test_many_names(tmp_path, open_mode): """Test that many chunk names can be written to a file.""" values = list(range(1000)) - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode=open_mode.write, - application='test_many_names', - schema='none', - schema_version=[1, 2]) 
as f: + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode=open_mode.write, + application='test_many_names', + schema='none', + schema_version=[1, 2], + ) as f: for _ in range(5): random.shuffle(values) for value in values: - f.write_chunk(name=str(value), - data=numpy.array([value * 13], dtype=numpy.int32)) + f.write_chunk( + name=str(value), data=numpy.array([value * 13], dtype=numpy.int32) + ) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test.gsd', - mode=open_mode.read, - application='test_many_names', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test.gsd', + mode=open_mode.read, + application='test_many_names', + schema='none', + schema_version=[1, 2], + ) as f: for frame in range(5): random.shuffle(values) for value in values: @@ -602,8 +677,7 @@ def test_many_names(tmp_path, open_mode): data_read = f.read_chunk(frame=frame, name=str(value)) numpy.testing.assert_array_equal(data, data_read) - with gsd.pygsd.GSDFile(file=open(str(tmp_path - / 'test.gsd'), mode='rb')) as f: + with gsd.pygsd.GSDFile(file=open(str(tmp_path / 'test.gsd'), mode='rb')) as f: for frame in range(5): random.shuffle(values) for value in values: @@ -636,18 +710,19 @@ def check_v1_file_read(f): assert chunk_names == values_str # test with the C implemantation - with gsd.fl.open(name=test_path / 'test_gsd_v1.gsd', - mode='r', - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=test_path / 'test_gsd_v1.gsd', + mode='r', + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: check_v1_file_read(f) # and the pure Python implementation with gsd.pygsd.GSDFile( - file=open(str(test_path / 'test_gsd_v1.gsd'), mode='rb')) as f: - + file=open(str(test_path / 'test_gsd_v1.gsd'), mode='rb') + ) as f: assert f.gsd_version == (1, 0) check_v1_file_read(f) @@ -676,12 +751,13 @@ def check_v1_file_read(f): shutil.copy(test_path / 'test_gsd_v1.gsd', tmp_path / 'test_gsd_v1.gsd') - with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd', - mode='r+', - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_gsd_v1.gsd', + mode='r+', + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: assert f.gsd_version == (1, 0) f.upgrade() @@ -690,19 +766,20 @@ def check_v1_file_read(f): check_v1_file_read(f) # and the same tests again after closing and opening the file - with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd', - mode=open_mode.read, - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_gsd_v1.gsd', + mode=open_mode.read, + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: assert f.gsd_version == (2, 0) check_v1_file_read(f) with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb')) as f: - + file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb') + ) as f: assert f.gsd_version == (2, 0) check_v1_file_read(f) @@ -754,12 +831,13 @@ def check_v1_file_read(f): numpy.testing.assert_array_equal(data, data_read) # test that we can write new entries to the file - with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd', - mode='r+', - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_gsd_v1.gsd', + mode='r+', + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: assert f.gsd_version == (1, 0) for value in values: 
@@ -773,18 +851,19 @@ def check_v1_file_read(f): check_v1_file_read(f) # test opening again with the C implemantation - with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd', - mode=open_mode.read, - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_gsd_v1.gsd', + mode=open_mode.read, + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: check_v1_file_read(f) # and the pure Python implementation with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb')) as f: - + file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb') + ) as f: assert f.gsd_version == (1, 0) check_v1_file_read(f) @@ -823,12 +902,13 @@ def check_v1_file_read(f): numpy.testing.assert_array_equal(data, data_read) # test that we can write new entries to the file - with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd', - mode='r+', - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_gsd_v1.gsd', + mode='r+', + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: assert f.gsd_version == (1, 0) f.upgrade() @@ -846,20 +926,21 @@ def check_v1_file_read(f): check_v1_file_read(f) # test opening again with the C implemantation - with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd', - mode=open_mode.read, - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_gsd_v1.gsd', + mode=open_mode.read, + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ) as f: assert f.gsd_version == (2, 0) check_v1_file_read(f) # and the pure Python implementation with gsd.pygsd.GSDFile( - file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb')) as f: - + file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb') + ) as f: assert f.gsd_version == (2, 0) check_v1_file_read(f) @@ -869,47 +950,52 @@ def test_zero_size(tmp_path, open_mode): """Test that zero-size data chunks are allowed.""" data = numpy.array([], dtype=numpy.float32) - with gsd.fl.open(name=tmp_path / 'test_zero.gsd', - mode=open_mode.write, - application='test_zero', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_zero.gsd', + mode=open_mode.write, + application='test_zero', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='data', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_zero.gsd', - mode=open_mode.read, - application='test_zero', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_zero.gsd', + mode=open_mode.read, + application='test_zero', + schema='none', + schema_version=[1, 2], + ) as f: assert f.nframes == 1 data_read = f.read_chunk(frame=0, name='data') assert data_read.shape == (0,) assert data_read.dtype == numpy.float32 # test again with pygsd - with gsd.pygsd.GSDFile(file=open(str(tmp_path - / 'test_zero.gsd'), mode='rb')) as f: + with gsd.pygsd.GSDFile(file=open(str(tmp_path / 'test_zero.gsd'), mode='rb')) as f: assert f.nframes == 1 data_read = f.read_chunk(frame=0, name='data') assert data_read.shape == (0,) assert data_read.dtype == numpy.float32 -@pytest.mark.skipif(sys.version_info < (3, 7), - reason="Python 3.6 fails to handle non-ascii characters.") +@pytest.mark.skipif( + sys.version_info < (3, 7), reason='Python 3.6 fails to handle non-ascii characters.' 
+) def test_utf8(tmp_path): """Test that the API handles UTF-8 encoding for the filename.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) fname = '中文.gsd' - with gsd.fl.open(name=tmp_path / fname, - mode='x', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / fname, + mode='x', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() @@ -917,19 +1003,23 @@ def test_utf8(tmp_path): print(dir_list) assert fname in dir_list - with gsd.fl.open(name=tmp_path / fname, - mode='w', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / fname, + mode='w', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / fname, - mode='r', - application='test_open', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / fname, + mode='r', + application='test_open', + schema='none', + schema_version=[1, 2], + ) as f: f.read_chunk(0, name='chunk1') @@ -937,21 +1027,25 @@ def test_utf8(tmp_path): def test_read_write(tmp_path, mode): """Test that data chunks can read from files opened in all write modes.""" if mode[0] == 'r' or mode[0] == 'a': - with gsd.fl.open(name=tmp_path / 'test_read_write.gsd', - mode='w', - application='test_read_write', - schema='none', - schema_version=[1, 2]): + with gsd.fl.open( + name=tmp_path / 'test_read_write.gsd', + mode='w', + application='test_read_write', + schema='none', + schema_version=[1, 2], + ): pass data = numpy.array([10], dtype=numpy.int64) nframes = 1024 - with gsd.fl.open(name=tmp_path / 'test_read_write.gsd', - mode=mode, - application='test_read_write', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_read_write.gsd', + mode=mode, + application='test_read_write', + schema='none', + schema_version=[1, 2], + ) as f: assert f.mode == mode for i in range(nframes): data[0] = i @@ -971,11 +1065,13 @@ def test_read_write(tmp_path, mode): def test_flush(tmp_path, open_mode, n_flush): """Test flush.""" data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64) - with gsd.fl.open(name=tmp_path / 'test_flush.gsd', - mode=open_mode.write, - application='test_flush', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_flush.gsd', + mode=open_mode.write, + application='test_flush', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='chunk1', data=data) f.end_frame() f.write_chunk(name='chunk2', data=data) @@ -984,8 +1080,7 @@ def test_flush(tmp_path, open_mode, n_flush): # Ensure that the data is buffered by opening the file with a 2nd # handle read-only and checking it. 
- with gsd.fl.open(name=tmp_path / 'test_flush.gsd', - mode='r') as f_readonly: + with gsd.fl.open(name=tmp_path / 'test_flush.gsd', mode='r') as f_readonly: assert not f_readonly.chunk_exists(frame=0, name='chunk1') assert not f_readonly.chunk_exists(frame=1, name='chunk2') assert f_readonly.nframes == 0 @@ -995,8 +1090,7 @@ def test_flush(tmp_path, open_mode, n_flush): for _i in range(n_flush): f.flush() - with gsd.fl.open(name=tmp_path / 'test_flush.gsd', - mode=open_mode.read) as f: + with gsd.fl.open(name=tmp_path / 'test_flush.gsd', mode=open_mode.read) as f: assert f.chunk_exists(frame=0, name='chunk1') assert f.chunk_exists(frame=1, name='chunk2') @@ -1009,12 +1103,13 @@ def test_flush(tmp_path, open_mode, n_flush): def test_maximum_write_buffer_size(tmp_path, open_mode): """Test maximum_write_buffer_size.""" - with gsd.fl.open(name=tmp_path / 'test_maximum_write_buffer_size.gsd', - mode=open_mode.write, - application='test_maximum_write_buffer_size', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_maximum_write_buffer_size.gsd', + mode=open_mode.write, + application='test_maximum_write_buffer_size', + schema='none', + schema_version=[1, 2], + ) as f: assert f.maximum_write_buffer_size > 0 f.maximum_write_buffer_size = 1024 assert f.maximum_write_buffer_size == 1024 @@ -1025,12 +1120,13 @@ def test_maximum_write_buffer_size(tmp_path, open_mode): def test_index_entries_to_buffer(tmp_path, open_mode): """Test index_entries_to_buffer.""" - with gsd.fl.open(name=tmp_path / 'test_index_entries_to_buffer.gsd', - mode=open_mode.write, - application='test_index_entries_to_buffer', - schema='none', - schema_version=[1, 2]) as f: - + with gsd.fl.open( + name=tmp_path / 'test_index_entries_to_buffer.gsd', + mode=open_mode.write, + application='test_index_entries_to_buffer', + schema='none', + schema_version=[1, 2], + ) as f: assert f.index_entries_to_buffer > 0 f.index_entries_to_buffer = 1024 assert f.index_entries_to_buffer == 1024 @@ -1042,9 +1138,11 @@ def test_index_entries_to_buffer(tmp_path, open_mode): def test_file_exists_error(): """Test that IO errors throw the correct Python Excetion.""" with pytest.raises(FileExistsError): - with gsd.fl.open(name=test_path / 'test_gsd_v1.gsd', - mode='x', - application='test_gsd_v1', - schema='none', - schema_version=[1, 2]): + with gsd.fl.open( + name=test_path / 'test_gsd_v1.gsd', + mode='x', + application='test_gsd_v1', + schema='none', + schema_version=[1, 2], + ): pass diff --git a/gsd/test/test_hoomd.py b/gsd/test/test_hoomd.py index 21488e99..94aeb580 100644 --- a/gsd/test/test_hoomd.py +++ b/gsd/test/test_hoomd.py @@ -14,7 +14,7 @@ def test_create(tmp_path): """Test that gsd files can be created.""" - with gsd.hoomd.open(name=tmp_path / "test_create.gsd", mode='w') as hf: + with gsd.hoomd.open(name=tmp_path / 'test_create.gsd', mode='w') as hf: assert hf.file.schema == 'hoomd' assert hf.file.schema_version >= (1, 0) @@ -24,14 +24,12 @@ def test_append(tmp_path, open_mode): frame = gsd.hoomd.Frame() frame.particles.N = 10 - with gsd.hoomd.open(name=tmp_path / "test_append.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_append.gsd', mode=open_mode.write) as hf: for i in range(5): frame.configuration.step = i + 1 hf.append(frame) - with gsd.hoomd.open(name=tmp_path / "test_append.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_append.gsd', mode=open_mode.read) as hf: assert len(hf) == 5 @@ -40,15 +38,14 @@ def test_flush(tmp_path, 
open_mode): frame = gsd.hoomd.Frame() frame.particles.N = 10 - hf = gsd.hoomd.open(name=tmp_path / "test_append.gsd", mode=open_mode.write) + hf = gsd.hoomd.open(name=tmp_path / 'test_append.gsd', mode=open_mode.write) for i in range(5): frame.configuration.step = i + 1 hf.append(frame) hf.flush() - with gsd.hoomd.open(name=tmp_path / "test_append.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_append.gsd', mode=open_mode.read) as hf: assert len(hf) == 5 @@ -64,12 +61,10 @@ def test_extend(tmp_path, open_mode): frame = gsd.hoomd.Frame() frame.particles.N = 10 - with gsd.hoomd.open(name=tmp_path / "test_extend.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_extend.gsd', mode=open_mode.write) as hf: hf.extend(create_frame(i) for i in range(5)) - with gsd.hoomd.open(name=tmp_path / "test_extend.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_extend.gsd', mode=open_mode.read) as hf: assert len(hf) == 5 @@ -84,103 +79,132 @@ def test_defaults(tmp_path, open_mode): frame.constraints.N = 4 frame.pairs.N = 7 - with gsd.hoomd.open(name=tmp_path / "test_defaults.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_defaults.gsd', mode=open_mode.write + ) as hf: hf.append(frame) - with gsd.hoomd.open(name=tmp_path / "test_defaults.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_defaults.gsd', mode=open_mode.read) as hf: s = hf[0] assert s.configuration.step == 0 assert s.configuration.dimensions == 3 numpy.testing.assert_array_equal( - s.configuration.box, - numpy.array([1, 1, 1, 0, 0, 0], dtype=numpy.float32)) + s.configuration.box, numpy.array([1, 1, 1, 0, 0, 0], dtype=numpy.float32) + ) assert s.particles.N == 2 assert s.particles.types == ['A'] assert s.particles.type_shapes == [{}] numpy.testing.assert_array_equal( - s.particles.typeid, numpy.array([0, 0], dtype=numpy.uint32)) + s.particles.typeid, numpy.array([0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( - s.particles.mass, numpy.array([1, 1], dtype=numpy.float32)) + s.particles.mass, numpy.array([1, 1], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( - s.particles.diameter, numpy.array([1, 1], dtype=numpy.float32)) + s.particles.diameter, numpy.array([1, 1], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( - s.particles.body, numpy.array([-1, -1], dtype=numpy.int32)) + s.particles.body, numpy.array([-1, -1], dtype=numpy.int32) + ) numpy.testing.assert_array_equal( - s.particles.charge, numpy.array([0, 0], dtype=numpy.float32)) + s.particles.charge, numpy.array([0, 0], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( s.particles.moment_inertia, - numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.float32)) + numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.position, - numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.float32)) + numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.orientation, - numpy.array([[1, 0, 0, 0], [1, 0, 0, 0]], dtype=numpy.float32)) + numpy.array([[1, 0, 0, 0], [1, 0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.velocity, - numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.float32)) + numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.angmom, - numpy.array([[0, 0, 0, 0], [0, 0, 0, 0]], dtype=numpy.float32)) + 
numpy.array([[0, 0, 0, 0], [0, 0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( - s.particles.image, - numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.int32)) + s.particles.image, numpy.array([[0, 0, 0], [0, 0, 0]], dtype=numpy.int32) + ) assert s.bonds.N == 3 assert s.bonds.types == [] numpy.testing.assert_array_equal( - s.bonds.typeid, numpy.array([0, 0, 0], dtype=numpy.uint32)) + s.bonds.typeid, numpy.array([0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( - s.bonds.group, - numpy.array([[0, 0], [0, 0], [0, 0]], dtype=numpy.uint32)) + s.bonds.group, numpy.array([[0, 0], [0, 0], [0, 0]], dtype=numpy.uint32) + ) assert s.angles.N == 4 assert s.angles.types == [] numpy.testing.assert_array_equal( - s.angles.typeid, numpy.array([0, 0, 0, 0], dtype=numpy.uint32)) + s.angles.typeid, numpy.array([0, 0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( s.angles.group, - numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], - dtype=numpy.uint32)) + numpy.array( + [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.uint32 + ), + ) assert s.dihedrals.N == 5 assert s.dihedrals.types == [] numpy.testing.assert_array_equal( - s.dihedrals.typeid, numpy.array([0, 0, 0, 0, 0], - dtype=numpy.uint32)) + s.dihedrals.typeid, numpy.array([0, 0, 0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( s.dihedrals.group, - numpy.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], - [0, 0, 0, 0]], - dtype=numpy.uint32)) + numpy.array( + [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], + dtype=numpy.uint32, + ), + ) assert s.impropers.N == 6 assert s.impropers.types == [] numpy.testing.assert_array_equal( - s.impropers.typeid, - numpy.array([0, 0, 0, 0, 0, 0], dtype=numpy.uint32)) + s.impropers.typeid, numpy.array([0, 0, 0, 0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( s.impropers.group, - numpy.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], - [0, 0, 0, 0], [0, 0, 0, 0]], - dtype=numpy.uint32)) + numpy.array( + [ + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + ], + dtype=numpy.uint32, + ), + ) assert s.constraints.N == 4 numpy.testing.assert_array_equal( - s.constraints.value, numpy.array([0, 0, 0, 0], dtype=numpy.float32)) + s.constraints.value, numpy.array([0, 0, 0, 0], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( s.constraints.group, - numpy.array([[0, 0], [0, 0], [0, 0], [0, 0]], dtype=numpy.uint32)) + numpy.array([[0, 0], [0, 0], [0, 0], [0, 0]], dtype=numpy.uint32), + ) assert s.pairs.N == 7 assert s.pairs.types == [] numpy.testing.assert_array_equal( - s.pairs.typeid, numpy.array([0] * 7, dtype=numpy.uint32)) + s.pairs.typeid, numpy.array([0] * 7, dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( - s.pairs.group, numpy.array([[0, 0]] * 7, dtype=numpy.uint32)) + s.pairs.group, numpy.array([[0, 0]] * 7, dtype=numpy.uint32) + ) assert len(s.state) == 0 @@ -194,18 +218,9 @@ def make_nondefault_frame(): frame0.particles.N = 2 frame0.particles.types = ['A', 'B', 'C'] frame0.particles.type_shapes = [ - { - "type": "Sphere", - "diameter": 2.0 - }, - { - "type": "Sphere", - "diameter": 3.0 - }, - { - "type": "Sphere", - "diameter": 4.0 - }, + {'type': 'Sphere', 'diameter': 2.0}, + {'type': 'Sphere', 'diameter': 3.0}, + {'type': 'Sphere', 'diameter': 4.0}, ] frame0.particles.typeid = [1, 2] frame0.particles.mass = [2, 3] @@ -258,30 +273,27 @@ def assert_frames_equal(s, frame0, check_position=True, 
check_step=True): assert s.configuration.step == frame0.configuration.step assert s.configuration.dimensions == frame0.configuration.dimensions - numpy.testing.assert_array_equal(s.configuration.box, - frame0.configuration.box) + numpy.testing.assert_array_equal(s.configuration.box, frame0.configuration.box) assert s.particles.N == frame0.particles.N assert s.particles.types == frame0.particles.types assert s.particles.type_shapes == frame0.particles.type_shapes - numpy.testing.assert_array_equal(s.particles.typeid, - frame0.particles.typeid) + numpy.testing.assert_array_equal(s.particles.typeid, frame0.particles.typeid) numpy.testing.assert_array_equal(s.particles.mass, frame0.particles.mass) - numpy.testing.assert_array_equal(s.particles.diameter, - frame0.particles.diameter) + numpy.testing.assert_array_equal(s.particles.diameter, frame0.particles.diameter) numpy.testing.assert_array_equal(s.particles.body, frame0.particles.body) - numpy.testing.assert_array_equal(s.particles.charge, - frame0.particles.charge) - numpy.testing.assert_array_equal(s.particles.moment_inertia, - frame0.particles.moment_inertia) + numpy.testing.assert_array_equal(s.particles.charge, frame0.particles.charge) + numpy.testing.assert_array_equal( + s.particles.moment_inertia, frame0.particles.moment_inertia + ) if check_position: - numpy.testing.assert_array_equal(s.particles.position, - frame0.particles.position) - numpy.testing.assert_array_equal(s.particles.orientation, - frame0.particles.orientation) - numpy.testing.assert_array_equal(s.particles.velocity, - frame0.particles.velocity) - numpy.testing.assert_array_equal(s.particles.angmom, - frame0.particles.angmom) + numpy.testing.assert_array_equal( + s.particles.position, frame0.particles.position + ) + numpy.testing.assert_array_equal( + s.particles.orientation, frame0.particles.orientation + ) + numpy.testing.assert_array_equal(s.particles.velocity, frame0.particles.velocity) + numpy.testing.assert_array_equal(s.particles.angmom, frame0.particles.angmom) numpy.testing.assert_array_equal(s.particles.image, frame0.particles.image) assert s.bonds.N == frame0.bonds.N @@ -296,21 +308,17 @@ def assert_frames_equal(s, frame0, check_position=True, check_step=True): assert s.dihedrals.N == frame0.dihedrals.N assert s.dihedrals.types == frame0.dihedrals.types - numpy.testing.assert_array_equal(s.dihedrals.typeid, - frame0.dihedrals.typeid) + numpy.testing.assert_array_equal(s.dihedrals.typeid, frame0.dihedrals.typeid) numpy.testing.assert_array_equal(s.dihedrals.group, frame0.dihedrals.group) assert s.impropers.N == frame0.impropers.N assert s.impropers.types == frame0.impropers.types - numpy.testing.assert_array_equal(s.impropers.typeid, - frame0.impropers.typeid) + numpy.testing.assert_array_equal(s.impropers.typeid, frame0.impropers.typeid) numpy.testing.assert_array_equal(s.impropers.group, frame0.impropers.group) assert s.constraints.N == frame0.constraints.N - numpy.testing.assert_array_equal(s.constraints.value, - frame0.constraints.value) - numpy.testing.assert_array_equal(s.constraints.group, - frame0.constraints.group) + numpy.testing.assert_array_equal(s.constraints.value, frame0.constraints.value) + numpy.testing.assert_array_equal(s.constraints.group, frame0.constraints.group) assert s.pairs.N == frame0.pairs.N assert s.pairs.types == frame0.pairs.types @@ -335,8 +343,10 @@ def test_fallback(tmp_path, open_mode): frame2 = gsd.hoomd.Frame() frame2.particles.N = 3 frame2.particles.types = ['q', 's'] - frame2.particles.type_shapes = \ - [{}, {"type": 
"Ellipsoid", "a": 7.0, "b": 5.0, "c": 3.0}] + frame2.particles.type_shapes = [ + {}, + {'type': 'Ellipsoid', 'a': 7.0, 'b': 5.0, 'c': 3.0}, + ] frame2.bonds.N = 3 frame2.angles.N = 4 frame2.dihedrals.N = 5 @@ -344,12 +354,12 @@ def test_fallback(tmp_path, open_mode): frame2.constraints.N = 4 frame2.pairs.N = 7 - with gsd.hoomd.open(name=tmp_path / "test_fallback.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_fallback.gsd', mode=open_mode.write + ) as hf: hf.extend([frame0, frame1, frame2]) - with gsd.hoomd.open(name=tmp_path / "test_fallback.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_fallback.gsd', mode=open_mode.read) as hf: assert len(hf) == 3 s = hf[0] @@ -372,88 +382,120 @@ def test_fallback(tmp_path, open_mode): assert s.particles.types == ['q', 's'] assert s.particles.type_shapes == frame2.particles.type_shapes numpy.testing.assert_array_equal( - s.particles.typeid, numpy.array([0, 0, 0], dtype=numpy.uint32)) + s.particles.typeid, numpy.array([0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( - s.particles.mass, numpy.array([1, 1, 1], dtype=numpy.float32)) + s.particles.mass, numpy.array([1, 1, 1], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( - s.particles.diameter, numpy.array([1, 1, 1], dtype=numpy.float32)) + s.particles.diameter, numpy.array([1, 1, 1], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( - s.particles.body, numpy.array([-1, -1, -1], dtype=numpy.float32)) + s.particles.body, numpy.array([-1, -1, -1], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( - s.particles.charge, numpy.array([0, 0, 0], dtype=numpy.float32)) + s.particles.charge, numpy.array([0, 0, 0], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( s.particles.moment_inertia, - numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.float32)) + numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.position, - numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.float32)) + numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.orientation, - numpy.array([[1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]], - dtype=numpy.float32)) + numpy.array( + [[1, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]], dtype=numpy.float32 + ), + ) numpy.testing.assert_array_equal( s.particles.velocity, - numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.float32)) + numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.float32), + ) numpy.testing.assert_array_equal( s.particles.angmom, - numpy.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], - dtype=numpy.float32)) + numpy.array( + [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], dtype=numpy.float32 + ), + ) numpy.testing.assert_array_equal( s.particles.image, - numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.int32)) + numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.int32), + ) assert s.bonds.N == 3 assert s.bonds.types == frame0.bonds.types numpy.testing.assert_array_equal( - s.bonds.typeid, numpy.array([0, 0, 0], dtype=numpy.uint32)) + s.bonds.typeid, numpy.array([0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( - s.bonds.group, - numpy.array([[0, 0], [0, 0], [0, 0]], dtype=numpy.uint32)) + s.bonds.group, numpy.array([[0, 0], [0, 0], [0, 0]], dtype=numpy.uint32) + ) assert s.angles.N == 4 assert s.angles.types == frame0.angles.types numpy.testing.assert_array_equal( - 
s.angles.typeid, numpy.array([0, 0, 0, 0], dtype=numpy.uint32)) + s.angles.typeid, numpy.array([0, 0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( s.angles.group, - numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], - dtype=numpy.uint32)) + numpy.array( + [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]], dtype=numpy.uint32 + ), + ) assert s.dihedrals.N == 5 assert s.dihedrals.types == frame0.dihedrals.types numpy.testing.assert_array_equal( - s.dihedrals.typeid, numpy.array([0, 0, 0, 0, 0], - dtype=numpy.uint32)) + s.dihedrals.typeid, numpy.array([0, 0, 0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( s.dihedrals.group, - numpy.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], - [0, 0, 0, 0]], - dtype=numpy.uint32)) + numpy.array( + [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], + dtype=numpy.uint32, + ), + ) assert s.impropers.N == 6 assert s.impropers.types == frame0.impropers.types numpy.testing.assert_array_equal( - s.impropers.typeid, - numpy.array([0, 0, 0, 0, 0, 0], dtype=numpy.uint32)) + s.impropers.typeid, numpy.array([0, 0, 0, 0, 0, 0], dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( s.impropers.group, - numpy.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], - [0, 0, 0, 0], [0, 0, 0, 0]], - dtype=numpy.uint32)) + numpy.array( + [ + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + ], + dtype=numpy.uint32, + ), + ) assert s.constraints.N == 4 numpy.testing.assert_array_equal( - s.constraints.value, numpy.array([0, 0, 0, 0], dtype=numpy.float32)) + s.constraints.value, numpy.array([0, 0, 0, 0], dtype=numpy.float32) + ) numpy.testing.assert_array_equal( s.constraints.group, - numpy.array([[0, 0], [0, 0], [0, 0], [0, 0]], dtype=numpy.uint32)) + numpy.array([[0, 0], [0, 0], [0, 0], [0, 0]], dtype=numpy.uint32), + ) assert s.pairs.N == 7 assert s.pairs.types == frame0.pairs.types numpy.testing.assert_array_equal( - s.pairs.typeid, numpy.array([0] * 7, dtype=numpy.uint32)) + s.pairs.typeid, numpy.array([0] * 7, dtype=numpy.uint32) + ) numpy.testing.assert_array_equal( - s.pairs.group, numpy.array([[0, 0]] * 7, dtype=numpy.uint32)) + s.pairs.group, numpy.array([[0, 0]] * 7, dtype=numpy.uint32) + ) assert 'value' in s.log numpy.testing.assert_array_equal(s.log['value'], frame0.log['value']) @@ -473,12 +515,14 @@ def test_fallback_to_frame0(tmp_path, open_mode): frame1.constraints.N = None frame1.pairs.N = None - with gsd.hoomd.open(name=tmp_path / "test_fallback2.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_fallback2.gsd', mode=open_mode.write + ) as hf: hf.extend([frame0, frame1]) - with gsd.hoomd.open(name=tmp_path / "test_fallback2.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_fallback2.gsd', mode=open_mode.read + ) as hf: assert len(hf) == 2 s = hf[1] @@ -540,12 +584,14 @@ def test_no_fallback(tmp_path, open_mode): frame1.pairs.typeid = [0] * frame0.pairs.N frame1.pairs.group = [[0, 0]] * frame0.pairs.N - with gsd.hoomd.open(name=tmp_path / "test_no_fallback.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_no_fallback.gsd', mode=open_mode.write + ) as hf: hf.extend([frame0, frame1]) - with gsd.hoomd.open(name=tmp_path / "test_no_fallback.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_no_fallback.gsd', mode=open_mode.read + ) as hf: assert len(hf) == 2 s = hf[1] @@ -555,12 +601,14 @@ def 
test_no_fallback(tmp_path, open_mode): def test_iteration(tmp_path, open_mode): """Test the iteration protocols for hoomd trajectories.""" - with gsd.hoomd.open(name=tmp_path / "test_iteration.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_iteration.gsd', mode=open_mode.write + ) as hf: hf.extend(create_frame(i) for i in range(20)) - with gsd.hoomd.open(name=tmp_path / "test_iteration.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open( + name=tmp_path / 'test_iteration.gsd', mode=open_mode.read + ) as hf: step = hf[-1].configuration.step assert step == 20 @@ -597,12 +645,10 @@ def test_iteration(tmp_path, open_mode): def test_slicing_and_iteration(tmp_path, open_mode): """Test that hoomd trajectories can be sliced.""" - with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_slicing.gsd', mode=open_mode.write) as hf: hf.extend(create_frame(i) for i in range(20)) - with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_slicing.gsd', mode=open_mode.read) as hf: # Test len()-function on trajectory and sliced trajectory. assert len(hf) == 20 assert len(hf[:10]) == 10 @@ -640,12 +686,10 @@ def test_slicing_and_iteration(tmp_path, open_mode): def test_view_slicing_and_iteration(tmp_path, open_mode): """Test that trajectories can be sliced.""" - with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_slicing.gsd', mode=open_mode.write) as hf: hf.extend(create_frame(i) for i in range(40)) - with gsd.hoomd.open(name=tmp_path / "test_slicing.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_slicing.gsd', mode=open_mode.read) as hf: view = hf[::2] # Test len()-function on trajectory and sliced view. 
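
Every hunk in this file applies the same mechanical rewrite, so a condensed sketch of
the convention may be easier to review than the individual hunks. Assuming
ruff-format's Black-compatible defaults (88-character lines, a call exploded to one
argument per line when it does not fit, and the trailing comma then preserving that
expanded shape) together with the single-quote preference used throughout this
series, the rule reduces to the two cases below. The sketch is illustrative only;
the variable names are not taken from the patch.

    import numpy

    typeid = numpy.array([0, 0, 0], dtype=numpy.uint32)
    group = numpy.array([[0, 0], [0, 0], [0, 0]], dtype=numpy.uint32)

    # Fits within 88 columns, so the formatter keeps the call on one line.
    numpy.testing.assert_array_equal(typeid, numpy.array([0, 0, 0], dtype=numpy.uint32))

    # Does not fit: each argument moves to its own line, and the trailing comma
    # keeps the call in this expanded form on later formatter runs.
    numpy.testing.assert_array_equal(
        group,
        numpy.array([[0, 0], [0, 0], [0, 0]], dtype=numpy.uint32),
    )
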
@@ -683,13 +727,12 @@ def test_view_slicing_and_iteration(tmp_path, open_mode): with pytest.raises(IndexError): view[len(view)] assert view[0].configuration.step == view[0].configuration.step - assert view[len(view) - - 1].configuration.step == view[-1].configuration.step + assert view[len(view) - 1].configuration.step == view[-1].configuration.step def test_truncate(tmp_path): """Test the truncate API.""" - with gsd.hoomd.open(name=tmp_path / "test_iteration.gsd", mode='w') as hf: + with gsd.hoomd.open(name=tmp_path / 'test_iteration.gsd', mode='w') as hf: hf.extend(create_frame(i) for i in range(20)) assert len(hf) == 20 @@ -711,31 +754,36 @@ def test_state(tmp_path, open_mode): frame1 = gsd.hoomd.Frame() frame1.state['hpmc/convex_polyhedron/N'] = [3] - frame1.state['hpmc/convex_polyhedron/vertices'] = [[-1, -1, -1], [0, 1, 1], - [1, 0, 0]] + frame1.state['hpmc/convex_polyhedron/vertices'] = [ + [-1, -1, -1], + [0, 1, 1], + [1, 0, 0], + ] - with gsd.hoomd.open(name=tmp_path / "test_state.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_state.gsd', mode=open_mode.write) as hf: hf.extend([frame0, frame1]) - with gsd.hoomd.open(name=tmp_path / "test_state.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_state.gsd', mode=open_mode.read) as hf: assert len(hf) == 2 s = hf[0] - numpy.testing.assert_array_equal(s.state['hpmc/sphere/radius'], - frame0.state['hpmc/sphere/radius']) - numpy.testing.assert_array_equal(s.state['hpmc/sphere/orientable'], - frame0.state['hpmc/sphere/orientable']) + numpy.testing.assert_array_equal( + s.state['hpmc/sphere/radius'], frame0.state['hpmc/sphere/radius'] + ) + numpy.testing.assert_array_equal( + s.state['hpmc/sphere/orientable'], frame0.state['hpmc/sphere/orientable'] + ) s = hf[1] numpy.testing.assert_array_equal( s.state['hpmc/convex_polyhedron/N'], - frame1.state['hpmc/convex_polyhedron/N']) + frame1.state['hpmc/convex_polyhedron/N'], + ) numpy.testing.assert_array_equal( s.state['hpmc/convex_polyhedron/vertices'], - frame1.state['hpmc/convex_polyhedron/vertices']) + frame1.state['hpmc/convex_polyhedron/vertices'], + ) def test_log(tmp_path, open_mode): @@ -752,58 +800,63 @@ def test_log(tmp_path, open_mode): frame1.log['particles/pair_lj_energy'] = [1, 2, -4, -10] frame1.log['value/pressure'] = [5] - with gsd.hoomd.open(name=tmp_path / "test_log.gsd", - mode=open_mode.write) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_log.gsd', mode=open_mode.write) as hf: hf.extend([frame0, frame1]) - with gsd.hoomd.open(name=tmp_path / "test_log.gsd", - mode=open_mode.read) as hf: + with gsd.hoomd.open(name=tmp_path / 'test_log.gsd', mode=open_mode.read) as hf: assert len(hf) == 2 s = hf[0] - numpy.testing.assert_array_equal(s.log['particles/net_force'], - frame0.log['particles/net_force']) - numpy.testing.assert_array_equal(s.log['particles/pair_lj_energy'], - frame0.log['particles/pair_lj_energy']) - numpy.testing.assert_array_equal(s.log['value/potential_energy'], - frame0.log['value/potential_energy']) - numpy.testing.assert_array_equal(s.log['value/pressure'], - frame0.log['value/pressure']) + numpy.testing.assert_array_equal( + s.log['particles/net_force'], frame0.log['particles/net_force'] + ) + numpy.testing.assert_array_equal( + s.log['particles/pair_lj_energy'], frame0.log['particles/pair_lj_energy'] + ) + numpy.testing.assert_array_equal( + s.log['value/potential_energy'], frame0.log['value/potential_energy'] + ) + numpy.testing.assert_array_equal( + s.log['value/pressure'], 
frame0.log['value/pressure'] + ) s = hf[1] # unspecified entries pull from frame 0 - numpy.testing.assert_array_equal(s.log['particles/net_force'], - frame0.log['particles/net_force']) - numpy.testing.assert_array_equal(s.log['value/potential_energy'], - frame0.log['value/potential_energy']) + numpy.testing.assert_array_equal( + s.log['particles/net_force'], frame0.log['particles/net_force'] + ) + numpy.testing.assert_array_equal( + s.log['value/potential_energy'], frame0.log['value/potential_energy'] + ) # specified entries are different in frame 1 - numpy.testing.assert_array_equal(s.log['particles/pair_lj_energy'], - frame1.log['particles/pair_lj_energy']) - numpy.testing.assert_array_equal(s.log['value/pressure'], - frame1.log['value/pressure']) + numpy.testing.assert_array_equal( + s.log['particles/pair_lj_energy'], frame1.log['particles/pair_lj_energy'] + ) + numpy.testing.assert_array_equal( + s.log['value/pressure'], frame1.log['value/pressure'] + ) def test_pickle(tmp_path): """Test that hoomd trajectory objects can be pickled.""" - with gsd.hoomd.open(name=tmp_path / "test_pickling.gsd", mode='w') as traj: + with gsd.hoomd.open(name=tmp_path / 'test_pickling.gsd', mode='w') as traj: traj.extend(create_frame(i) for i in range(20)) with pytest.raises(pickle.PickleError): pkl = pickle.dumps(traj) - with gsd.hoomd.open(name=tmp_path / "test_pickling.gsd", mode='r') as traj: + with gsd.hoomd.open(name=tmp_path / 'test_pickling.gsd', mode='r') as traj: pkl = pickle.dumps(traj) with pickle.loads(pkl) as hf: assert len(hf) == 20 @pytest.mark.parametrize( - 'container', - ['particles', 'bonds', 'angles', 'dihedrals', 'impropers', 'pairs']) + 'container', ['particles', 'bonds', 'angles', 'dihedrals', 'impropers', 'pairs'] +) def test_no_duplicate_types(tmp_path, container): """Test that duplicate types raise an error.""" - with gsd.hoomd.open(name=tmp_path / "test_create.gsd", mode='w') as hf: - + with gsd.hoomd.open(name=tmp_path / 'test_create.gsd', mode='w') as hf: frame = gsd.hoomd.Frame() getattr(frame, container).types = ['A', 'B', 'B', 'C'] @@ -836,68 +889,73 @@ def test_read_log(tmp_path): ] frame1.log['value/pressure'] = [5] - with gsd.hoomd.open(name=tmp_path / "test_log.gsd", mode='w') as hf: + with gsd.hoomd.open(name=tmp_path / 'test_log.gsd', mode='w') as hf: hf.extend([frame0, frame1]) # Test scalar_only = False - logged_data_dict = gsd.hoomd.read_log(name=tmp_path / "test_log.gsd", - scalar_only=False) + logged_data_dict = gsd.hoomd.read_log( + name=tmp_path / 'test_log.gsd', scalar_only=False + ) assert len(logged_data_dict) == 5 assert list(logged_data_dict.keys()) == [ - 'configuration/step', 'log/particles/pair_lj_energy', - 'log/particles/pair_lj_force', 'log/value/potential_energy', - 'log/value/pressure' + 'configuration/step', + 'log/particles/pair_lj_energy', + 'log/particles/pair_lj_force', + 'log/value/potential_energy', + 'log/value/pressure', ] - numpy.testing.assert_array_equal(logged_data_dict['configuration/step'], - [0, 1]) + numpy.testing.assert_array_equal(logged_data_dict['configuration/step'], [0, 1]) numpy.testing.assert_array_equal( - logged_data_dict['log/particles/pair_lj_energy'], [ + logged_data_dict['log/particles/pair_lj_energy'], + [ frame0.log['particles/pair_lj_energy'], - frame1.log['particles/pair_lj_energy'] - ]) + frame1.log['particles/pair_lj_energy'], + ], + ) numpy.testing.assert_array_equal( - logged_data_dict['log/particles/pair_lj_force'], [ - frame0.log['particles/pair_lj_force'], - frame1.log['particles/pair_lj_force'] - ]) + 
logged_data_dict['log/particles/pair_lj_force'], + [frame0.log['particles/pair_lj_force'], frame1.log['particles/pair_lj_force']], + ) numpy.testing.assert_array_equal( - logged_data_dict['log/value/potential_energy'], [ - *frame0.log['value/potential_energy'], - *frame0.log['value/potential_energy'] - ]) + logged_data_dict['log/value/potential_energy'], + [*frame0.log['value/potential_energy'], *frame0.log['value/potential_energy']], + ) numpy.testing.assert_array_equal( logged_data_dict['log/value/pressure'], - [*frame0.log['value/pressure'], *frame1.log['value/pressure']]) + [*frame0.log['value/pressure'], *frame1.log['value/pressure']], + ) # Test scalar_only = True - logged_data_dict = gsd.hoomd.read_log(name=tmp_path / "test_log.gsd", - scalar_only=True) + logged_data_dict = gsd.hoomd.read_log( + name=tmp_path / 'test_log.gsd', scalar_only=True + ) assert len(logged_data_dict) == 3 assert list(logged_data_dict.keys()) == [ - 'configuration/step', 'log/value/potential_energy', 'log/value/pressure' + 'configuration/step', + 'log/value/potential_energy', + 'log/value/pressure', ] - numpy.testing.assert_array_equal(logged_data_dict['configuration/step'], - [0, 1]) + numpy.testing.assert_array_equal(logged_data_dict['configuration/step'], [0, 1]) numpy.testing.assert_array_equal( - logged_data_dict['log/value/potential_energy'], [ - *frame0.log['value/potential_energy'], - *frame0.log['value/potential_energy'] - ]) + logged_data_dict['log/value/potential_energy'], + [*frame0.log['value/potential_energy'], *frame0.log['value/potential_energy']], + ) numpy.testing.assert_array_equal( logged_data_dict['log/value/pressure'], - [*frame0.log['value/pressure'], *frame1.log['value/pressure']]) + [*frame0.log['value/pressure'], *frame1.log['value/pressure']], + ) def test_read_log_warning(tmp_path): """Test that read_log issues a warning.""" frame = gsd.hoomd.Frame() - with gsd.hoomd.open(name=tmp_path / "test_log.gsd", mode='w') as hf: + with gsd.hoomd.open(name=tmp_path / 'test_log.gsd', mode='w') as hf: hf.extend([frame]) with pytest.warns(RuntimeWarning): - log = gsd.hoomd.read_log(tmp_path / "test_log.gsd") + log = gsd.hoomd.read_log(tmp_path / 'test_log.gsd') assert list(log.keys()) == ['configuration/step'] diff --git a/gsd/test/test_largefile.py b/gsd/test/test_largefile.py index eb65d829..367fd642 100644 --- a/gsd/test/test_largefile.py +++ b/gsd/test/test_largefile.py @@ -12,30 +12,34 @@ @pytest.mark.validate() -@pytest.mark.parametrize("N", [2**27, 2**28, 2**29 + 1]) +@pytest.mark.parametrize('N', [2**27, 2**28, 2**29 + 1]) def test_large_n(tmp_path, N): """Test data chunks and files larger than 2 GB.""" gc.collect() data = numpy.linspace(0, N, num=N, endpoint=False, dtype=numpy.uint32) - with gsd.fl.open(name=tmp_path / 'test_large_N.gsd', - mode='x', - application='test_large_N', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_large_N.gsd', + mode='x', + application='test_large_N', + schema='none', + schema_version=[1, 2], + ) as f: f.write_chunk(name='data', data=data) f.end_frame() - with gsd.fl.open(name=tmp_path / 'test_large_N.gsd', - mode='r', - application='test_large_N', - schema='none', - schema_version=[1, 2]) as f: + with gsd.fl.open( + name=tmp_path / 'test_large_N.gsd', + mode='r', + application='test_large_N', + schema='none', + schema_version=[1, 2], + ) as f: read_data = f.read_chunk(frame=0, name='data') # compare the array with memory usage so this test can pass on CI # platforms - diff = (data - read_data) + diff = data - 
read_data data = None read_data = None gc.collect() diff --git a/gsd/version.py b/gsd/version.py index 28e27179..66b550a5 100644 --- a/gsd/version.py +++ b/gsd/version.py @@ -9,7 +9,7 @@ not the file layer version it reads/writes. """ -version = "3.2.0" +version = '3.2.0' __all__ = [ 'version', diff --git a/scripts/benchmark-hoomd.py b/scripts/benchmark-hoomd.py index 72155387..1e89478f 100755 --- a/scripts/benchmark-hoomd.py +++ b/scripts/benchmark-hoomd.py @@ -46,7 +46,7 @@ def write_file(file, nframes, N, position, orientation): for i in range(0, nframes): if i % step == 0: - print(i, "/", nframes, file=sys.stderr, flush=True) + print(i, '/', nframes, file=sys.stderr, flush=True) write_frame(file, i, position, orientation) @@ -60,7 +60,7 @@ def read_sequential_file(file, nframes, nframes_read, N, position, orientation): for i in range(0, nframes_read): if i % step == 0: - print(i, "/", nframes, file=sys.stderr, flush=True) + print(i, '/', nframes, file=sys.stderr, flush=True) read_frame(file, i, position, orientation) @@ -77,7 +77,7 @@ def read_random_file(file, nframes, nframes_read, N, position, orientation): for i, f in enumerate(frames[:nframes_read]): if i % step == 0: - print(i, "/", nframes, file=sys.stderr, flush=True) + print(i, '/', nframes, file=sys.stderr, flush=True) read_frame(file, f, position, orientation) @@ -113,7 +113,7 @@ def run_benchmarks(N, size): bmark_read_size = actual_size # first, write the file and time how long it takes - print("Writing file: ", file=sys.stderr, flush=True) + print('Writing file: ', file=sys.stderr, flush=True) # if the file size is small, write it once to warm up the disk if size < 64 * 1024**3: @@ -134,20 +134,19 @@ def run_benchmarks(N, size): timings['write'] = actual_size / 1024**2 / (end - start) # time how long it takes to open the file - print("Opening file... ", file=sys.stderr, flush=True, end='') + print('Opening file... 
', file=sys.stderr, flush=True, end='') start = time.time() with gsd.hoomd.open(name='test.gsd', mode='r') as hf: end = time.time() - print(end - start, "s", file=sys.stderr, flush=True) + print(end - start, 's', file=sys.stderr, flush=True) - timings['open_time'] = (end - start) + timings['open_time'] = end - start # Read the file sequentially and measure the time taken - print("Sequential read file:", file=sys.stderr, flush=True) + print('Sequential read file:', file=sys.stderr, flush=True) start = time.time() - read_sequential_file(hf, nframes, nframes_read, N, position, - orientation) + read_sequential_file(hf, nframes, nframes_read, N, position, orientation) end = time.time() timings['seq_read'] = bmark_read_size / 1024**2 / (end - start) @@ -157,7 +156,7 @@ def run_benchmarks(N, size): call(['sudo', '/sbin/sysctl', 'vm.drop_caches=3'], stdout=PIPE) # Read the file randomly and measure the time taken - print("Random read file:", file=sys.stderr, flush=True) + print('Random read file:', file=sys.stderr, flush=True) start = time.time() read_random_file(hf, nframes, nframes_read, N, position, orientation) end = time.time() @@ -175,38 +174,59 @@ def run_sweep(size, size_str): if True: result = run_benchmarks(32 * 32, size) - print("{:<7} {:<6} {:<9.4g} {:<12.4g} " - "{:<11.4g} {:<13.4g} {:<11.3g}".format( - size_str, "32^2", result['open_time'] * 1000, result['write'], - result['seq_read'], result['random_read'], - result['random_read_time'])) + print( + '{:<7} {:<6} {:<9.4g} {:<12.4g} ' '{:<11.4g} {:<13.4g} {:<11.3g}'.format( + size_str, + '32^2', + result['open_time'] * 1000, + result['write'], + result['seq_read'], + result['random_read'], + result['random_read_time'], + ) + ) sys.stdout.flush() result = run_benchmarks(128 * 128, size) - print("{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}" - .format(size_str, "128^2", result['open_time'] * 1000, - result['write'], result['seq_read'], result['random_read'], - result['random_read_time'])) + print( + '{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}'.format( + size_str, + '128^2', + result['open_time'] * 1000, + result['write'], + result['seq_read'], + result['random_read'], + result['random_read_time'], + ) + ) sys.stdout.flush() result = run_benchmarks(1024 * 1024, size) - print("{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}" - .format(size_str, "1024^2", result['open_time'] * 1000, - result['write'], result['seq_read'], result['random_read'], - result['random_read_time'])) + print( + '{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}'.format( + size_str, + '1024^2', + result['open_time'] * 1000, + result['write'], + result['seq_read'], + result['random_read'], + result['random_read_time'], + ) + ) sys.stdout.flush() -print(""" +print( + """ ======= ====== ========= ============ =========== ============= =========== Size N Open (ms) Write (MB/s) Read (MB/s) Random (MB/s) Random (ms) -======= ====== ========= ============ =========== ============= ===========""") +======= ====== ========= ============ =========== ============= ===========""" +) -run_sweep(128 * 1024**2, "128 MiB") -run_sweep(1 * 1024**3, "1 GiB") +run_sweep(128 * 1024**2, '128 MiB') +run_sweep(1 * 1024**3, '1 GiB') # run_sweep(128*1024**3, "128 GiB"); -print("======= ====== ========= ============ " - "=========== ============= ===========") +print('======= ====== ========= ============ ' '=========== ============= ===========') diff --git a/setup.py b/setup.py index 95a2435c..3be888e1 100644 --- a/setup.py +++ 
b/setup.py @@ -9,12 +9,15 @@ from setuptools.extension import Extension extensions = cythonize( - [Extension( - 'gsd.fl', - sources=['gsd/fl.pyx', 'gsd/gsd.c'], - include_dirs=[numpy.get_include()], - define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")] - )], - compiler_directives={'language_level': 3}) + [ + Extension( + 'gsd.fl', + sources=['gsd/fl.pyx', 'gsd/gsd.c'], + include_dirs=[numpy.get_include()], + define_macros=[('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')], + ) + ], + compiler_directives={'language_level': 3}, +) setup(ext_modules=extensions) From dbf2265f71faa405170e56ebe93644f46431862e Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 16:47:27 -0500 Subject: [PATCH 6/9] Remove invalid modes. --- gsd/__main__.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/gsd/__main__.py b/gsd/__main__.py index 9545af72..8828f1d3 100644 --- a/gsd/__main__.py +++ b/gsd/__main__.py @@ -129,13 +129,6 @@ def main(): type=str, default='r', choices=[ - 'rb', - 'rb+', - 'wb', - 'wb+', - 'xb', - 'xb+', - 'ab', 'w', 'r', 'r+', From 3e6490dca11983a81cf99d557dff754a36842ed3 Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 16:50:38 -0500 Subject: [PATCH 7/9] Update change log. --- CHANGELOG.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 54d5789f..7722e4eb 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -17,6 +17,8 @@ Change Log * Provide support via GitHub discussions (`#308 `__). +* Use ruff + (`#317 `__). 3.2.0 (2023-09-27) ^^^^^^^^^^^^^^^^^^ From f95e5e3accfa09137839b092e8f8926033eab1a8 Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 16:52:31 -0500 Subject: [PATCH 8/9] Fix ruff check. --- scripts/benchmark-hoomd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/benchmark-hoomd.py b/scripts/benchmark-hoomd.py index 1e89478f..85109fb8 100755 --- a/scripts/benchmark-hoomd.py +++ b/scripts/benchmark-hoomd.py @@ -175,7 +175,7 @@ def run_sweep(size, size_str): result = run_benchmarks(32 * 32, size) print( - '{:<7} {:<6} {:<9.4g} {:<12.4g} ' '{:<11.4g} {:<13.4g} {:<11.3g}'.format( + '{:<7} {:<6} {:<9.4g} {:<12.4g} {:<11.4g} {:<13.4g} {:<11.3g}'.format( size_str, '32^2', result['open_time'] * 1000, @@ -229,4 +229,4 @@ def run_sweep(size, size_str): run_sweep(1 * 1024**3, '1 GiB') # run_sweep(128*1024**3, "128 GiB"); -print('======= ====== ========= ============ ' '=========== ============= ===========') +print('======= ====== ========= ============ =========== ============= ===========') From 774a26f64f43766ba3cda3da03a2c135919b3b54 Mon Sep 17 00:00:00 2001 From: "Joshua A. Anderson" Date: Wed, 17 Jan 2024 16:57:26 -0500 Subject: [PATCH 9/9] Adjust vscode rulers. --- .vscode/settings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 57bba077..430025de 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -13,12 +13,12 @@ }, "[python]": { "editor.rulers": [ - 80 + 88 ] }, "[cython]": { "editor.rulers": [ - 80 + 88 ] }, "[markdown]": {