From d1203d234ce283d36050ebbeafbcc36d1a36ebd0 Mon Sep 17 00:00:00 2001
From: Henrik Andersson
Date: Mon, 9 Sep 2024 09:48:36 +0200
Subject: [PATCH 1/5] Generic clean up

---
 mikeio/generic.py | 58 ++++++++++++++---------------------------------
 1 file changed, 17 insertions(+), 41 deletions(-)

diff --git a/mikeio/generic.py b/mikeio/generic.py
index 7cf0e29aa..63468d426 100644
--- a/mikeio/generic.py
+++ b/mikeio/generic.py
@@ -1,4 +1,5 @@
 from __future__ import annotations
+from dataclasses import dataclass
 import math
 import os
 import pathlib
@@ -36,42 +37,22 @@
 show_progress = True
 
+__all__ = [
+    "avg_time",
+    "concat",
+    "diff",
+    "extract",
+    "fill_corrupt",
+    "quantile",
+    "scale",
+    "sum",
+]
 
-class _ChunkInfo:
-    """Class for keeping track of an chunked processing
-
-    Parameters
-    ----------
-    n_data : int
-        number of data points
-    n_chunks : int
-        number of chunks
-
-    Attributes
-    ----------
-    n_data : int
-        number of data points
-    n_chunks : int
-        number of chunks
-    chunk_size : int
-        number of data points per chunk
-
-    Methods
-    -------
-    stop(start)
-        Return the stop index for a chunk
-    chunk_end(start)
-        Return the end index for a chunk
-    from_dfs(dfs, item_numbers, buffer_size)
-        Calculate chunk info based on # of elements in dfs file and selected buffer size
-    """
-
-    def __init__(self, n_data: int, n_chunks: int):
-        self.n_data = n_data
-        self.n_chunks = n_chunks
 
-    def __repr__(self) -> str:
-        return f"_ChunkInfo(n_chunks={self.n_chunks}, n_data={self.n_data}, chunk_size={self.chunk_size})"
+@dataclass
+class _ChunkInfo:
+    n_data: int
+    n_chunks: int
 
     @property
     def chunk_size(self) -> int:
@@ -187,17 +168,11 @@ def _clone(
         else:
             raise ValueError("Items of type: {type(items)} is not supported")
 
-    # Create file
     builder.CreateFile(str(outfilename))
 
-    # Copy static items
-    while True:
-        static_item = source.ReadStaticItemNext()
-        if static_item is None:
-            break
+    for static_item in iter(source.ReadStaticItemNext, None):
         builder.AddStaticItem(static_item)
 
-    # Get the file
     file = builder.GetFile()
 
     source.Close()
@@ -318,6 +293,7 @@ def fill_corrupt(
     dfs.Close()
 
 
+# TODO sum is conflicting with the built-in sum function, which we could have used above in line 78. Consider renaming.
 def sum(
     infilename_a: str | pathlib.Path,
     infilename_b: str | pathlib.Path,

From 73a8e08b80e7b646583b6564d87d721fa26da365 Mon Sep 17 00:00:00 2001
From: Henrik Andersson
Date: Mon, 9 Sep 2024 17:06:17 +0200
Subject: [PATCH 2/5] Extract method

---
 mikeio/generic.py | 80 ++++++++++++++++++-----------------------------
 1 file changed, 30 insertions(+), 50 deletions(-)

diff --git a/mikeio/generic.py b/mikeio/generic.py
index 63468d426..15bbbcce9 100644
--- a/mikeio/generic.py
+++ b/mikeio/generic.py
@@ -1,13 +1,15 @@
 from __future__ import annotations
 from dataclasses import dataclass
 import math
+import operator
 import os
 import pathlib
 from copy import deepcopy
 from datetime import datetime, timedelta
 from shutil import copyfile
 from collections.abc import Iterable, Sequence
-from typing import Union
+from typing import Callable, Union
+import warnings
 
 import numpy as np
 
@@ -293,13 +295,13 @@ def fill_corrupt(
     dfs.Close()
 
 
-# TODO sum is conflicting with the built-in sum function, which we could have used above in line 78. Consider renaming.
-def sum(
+def _process_dfs_files(
     infilename_a: str | pathlib.Path,
     infilename_b: str | pathlib.Path,
     outfilename: str | pathlib.Path,
+    op: Callable[[np.ndarray, np.ndarray], np.ndarray],
 ) -> None:
-    """Sum two dfs files (a+b)
+    """Process two dfs files with a specified operation
 
     Parameters
     ----------
@@ -309,6 +311,8 @@ def sum(
         full path to the second input file
     outfilename: str | pathlib.Path
         full path to the output file
+    op: Callable[[np.ndarray, np.ndarray], np.ndarray]
+        operation to perform on the data arrays
     """
     infilename_a = str(infilename_a)
     infilename_b = str(infilename_b)
@@ -333,10 +337,10 @@ def sum(
 
             itemdata_b = dfs_i_b.ReadItemTimeStep(item + 1, timestep)
             d_b = itemdata_b.Data
-            d_a[d_a == deletevalue] = np.nan
+            d_b[d_b == deletevalue] = np.nan
             time = itemdata_a.Time
 
-            outdata = d_a + d_b
+            outdata = op(d_a, d_b)
 
             darray = outdata.astype(np.float32)
 
@@ -347,59 +351,35 @@ def sum(
     dfs_o.Close()
 
 
-def diff(
+# TODO sum is conflicting with the built-in sum function, which we could have used above.
+def sum(
     infilename_a: str | pathlib.Path,
     infilename_b: str | pathlib.Path,
     outfilename: str | pathlib.Path,
 ) -> None:
-    """Calculate difference between two dfs files (a-b)
+    """Sum two dfs files (a+b)"""
 
-    Parameters
-    ----------
-    infilename_a: str | pathlib.Path
-        full path to the first input file
-    infilename_b: str | pathlib.Path
-        full path to the second input file
-    outfilename: str | pathlib.Path
-        full path to the output file
-    """
-    infilename_a = str(infilename_a)
-    infilename_b = str(infilename_b)
-    outfilename = str(outfilename)
+    # deprecated
+    warnings.warn(FutureWarning("This function is deprecated. Use add instead."))
+    _process_dfs_files(infilename_a, infilename_b, outfilename, operator.add)
 
-    copyfile(infilename_a, outfilename)
-    dfs_i_a = DfsFileFactory.DfsGenericOpen(infilename_a)
-    dfs_i_b = DfsFileFactory.DfsGenericOpen(infilename_b)
-    dfs_o = DfsFileFactory.DfsGenericOpenEdit(outfilename)
-
-    deletevalue = dfs_i_a.FileInfo.DeleteValueFloat
-
-    n_time_steps = dfs_i_a.FileInfo.TimeAxis.NumberOfTimeSteps
-    n_items = len(dfs_i_a.ItemInfo)
-    # TODO Add checks to verify identical structure of file a and b
-
-    for timestep in trange(n_time_steps):
-        for item in range(n_items):
-            itemdata_a = dfs_i_a.ReadItemTimeStep(item + 1, timestep)
-            d_a = itemdata_a.Data
-            d_a[d_a == deletevalue] = np.nan
-
-            itemdata_b = dfs_i_b.ReadItemTimeStep(item + 1, timestep)
-            d_b = itemdata_b.Data
-            d_b[d_b == deletevalue] = np.nan
-            time = itemdata_a.Time
-
-            outdata = d_a - d_b
-
-            d = outdata.astype(np.float32)
-            d[np.isnan(d)] = deletevalue
+def add(
+    infilename_a: str | pathlib.Path,
+    infilename_b: str | pathlib.Path,
+    outfilename: str | pathlib.Path,
+) -> None:
+    """Sum two dfs files (a+b)"""
+    _process_dfs_files(infilename_a, infilename_b, outfilename, operator.add)
 
-            dfs_o.WriteItemTimeStep(item + 1, timestep, time, d)
-    dfs_i_a.Close()
-    dfs_i_b.Close()
-    dfs_o.Close()
+
+def diff(
+    infilename_a: str | pathlib.Path,
+    infilename_b: str | pathlib.Path,
+    outfilename: str | pathlib.Path,
+) -> None:
+    """Calculate difference between two dfs files (a-b)"""
+    _process_dfs_files(infilename_a, infilename_b, outfilename, operator.sub)
 
 
 def concat(

From 6f614a3da6223e4d64aa51d528c223e19ab99895 Mon Sep 17 00:00:00 2001
From: Henrik Andersson
Date: Mon, 9 Sep 2024 17:06:26 +0200
Subject: [PATCH 3/5] Test type hints

---
 tests/test_generic.py | 109 +++++++++++++++++++-----------------------
 1 file changed, 50 insertions(+), 59 deletions(-)

diff --git a/tests/test_generic.py b/tests/test_generic.py
index aa3112923..45d4d3bfe 100644
--- a/tests/test_generic.py
+++ b/tests/test_generic.py
@@ -1,13 +1,13 @@
+from pathlib import Path
 import numpy as np
 import pandas as pd
 import mikeio
 from mikeio import generic
-from mikeio.generic import scale, diff, sum, extract, avg_time, fill_corrupt
+from mikeio.generic import scale, diff, sum, extract, avg_time, fill_corrupt, add
 import pytest
 
 
-def test_add_constant(tmp_path):
-
+def test_add_constant(tmp_path: Path) -> None:
     infilename = "tests/testdata/random.dfs0"
     fp = tmp_path / "add.dfs0"
     scale(infilename, fp, offset=100.0)
@@ -22,8 +22,7 @@ def test_add_constant(tmp_path):
     assert scaledvalue == pytest.approx(expected)
 
 
-def test_multiply_constant(tmp_path):
-
+def test_multiply_constant(tmp_path: Path) -> None:
     infilename = "tests/testdata/random.dfs0"
     fp = tmp_path / "mult.dfs0"
     scale(infilename, fp, factor=1.5)
@@ -38,8 +37,7 @@ def test_multiply_constant(tmp_path):
     assert scaledvalue == pytest.approx(expected)
 
 
-def test_multiply_constant_single_item_number(tmp_path):
-
+def test_multiply_constant_single_item_number(tmp_path: Path) -> None:
     infilename = "tests/testdata/wind_north_sea.dfsu"
     fp = tmp_path / "mult.dfsu"
     scale(infilename, fp, factor=1.5, items=[0])
@@ -59,8 +57,7 @@ def test_multiply_constant_single_item_number(tmp_path):
     assert scaledvalue_dir == pytest.approx(expected_dir)
 
 
-def test_multiply_constant_single_item_name(tmp_path):
-
+def test_multiply_constant_single_item_name(tmp_path: Path) -> None:
     infilename = "tests/testdata/wind_north_sea.dfsu"
     fp = tmp_path / "multname.dfsu"
     scale(infilename, fp, factor=1.5, items=["Wind speed"])
@@ -80,8 +77,7 @@ def test_multiply_constant_single_item_name(tmp_path):
     assert scaledvalue_dir == pytest.approx(expected_dir)
 
 
-def test_diff_itself(tmp_path):
-
+def test_diff_itself(tmp_path: Path) -> None:
     infilename_1 = "tests/testdata/gebco_sound.dfs2"
     infilename_2 = "tests/testdata/gebco_sound.dfs2"
     fp = tmp_path / "diff.dfs2"
@@ -99,13 +95,13 @@ def test_diff_itself(tmp_path):
     assert np.isnan(diffed["Elevation"].to_numpy()[0, -1, -1])
 
 
-def test_sum_itself(tmp_path):
-
+def test_sum_itself_deprecated(tmp_path: Path) -> None:
     infilename_1 = "tests/testdata/gebco_sound.dfs2"
     infilename_2 = "tests/testdata/gebco_sound.dfs2"
     fp = tmp_path / "diff.dfs2"
 
-    sum(infilename_1, infilename_2, fp)
+    with pytest.warns(FutureWarning):
+        sum(infilename_1, infilename_2, fp)
 
     mikeio.read(infilename_1)
 
@@ -114,8 +110,21 @@ def test_sum_itself(tmp_path):
     assert np.isnan(summed["Elevation"].to_numpy()[0][-1, -1])
 
 
-def test_add_constant_delete_values_unchanged(tmp_path):
+def test_add_itself(tmp_path: Path) -> None:
+    infilename_1 = "tests/testdata/gebco_sound.dfs2"
+    infilename_2 = "tests/testdata/gebco_sound.dfs2"
+    fp = tmp_path / "diff.dfs2"
+
+    add(infilename_1, infilename_2, fp)
+
+    mikeio.read(infilename_1)
+
+    summed = mikeio.read(fp)
 
+    assert np.isnan(summed["Elevation"].to_numpy()[0][-1, -1])
+
+
+def test_add_constant_delete_values_unchanged(tmp_path: Path) -> None:
     infilename = "tests/testdata/gebco_sound.dfs2"
     fp = tmp_path / "adj.dfs2"
     scale(infilename, fp, offset=-2.1, items=["Elevation"])
@@ -135,8 +144,7 @@ def test_add_constant_delete_values_unchanged(tmp_path):
     assert np.isnan(scaledvalue)
 
 
-def test_multiply_constant_delete_values_unchanged_2(tmp_path):
-
+def test_multiply_constant_delete_values_unchanged_2(tmp_path: Path) -> None:
     infilename = "tests/testdata/random_two_item.dfs2"
     fp = tmp_path / "adj.dfs2"
 
@@ -159,8 +167,7 @@ def test_multiply_constant_delete_values_unchanged_2(tmp_path):
     assert np.isnan(scaledvalue)
 
 
-def test_linear_transform(tmp_path):
-
+def test_linear_transform(tmp_path: Path) -> None:
     infilename = "tests/testdata/random.dfs0"
     fp = tmp_path / "linear.dfs0"
     scale(infilename, fp, offset=-20.0, factor=1.5)
@@ -175,8 +182,7 @@ def test_linear_transform(tmp_path):
     assert scaledvalue == pytest.approx(expected)
 
 
-def test_linear_transform_dfsu(tmp_path):
-
+def test_linear_transform_dfsu(tmp_path: Path) -> None:
     infilename = "tests/testdata/HD2D.dfsu"
     fp = tmp_path / "linear.dfsu"
     scale(infilename, fp, offset=-20.0, factor=1.5)
@@ -191,8 +197,7 @@ def test_linear_transform_dfsu(tmp_path):
     assert scaledvalue == pytest.approx(expected)
 
 
-def test_sum_dfsu(tmp_path):
-
+def test_sum_dfsu(tmp_path: Path) -> None:
     infilename_a = "tests/testdata/HD2D.dfsu"
     infilename_b = "tests/testdata/HD2D.dfsu"
     fp = tmp_path / "sum.dfsu"
@@ -208,8 +213,7 @@ def test_sum_dfsu(tmp_path):
     assert scaledvalue == pytest.approx(expected)
 
 
-def test_diff_dfsu(tmp_path):
-
+def test_diff_dfsu(tmp_path: Path) -> None:
     infilename_a = "tests/testdata/HD2D.dfsu"
     infilename_b = "tests/testdata/HD2D.dfsu"
     fp = tmp_path / "diff.dfsu"
@@ -224,7 +228,7 @@ def test_diff_dfsu(tmp_path):
     assert scaledvalue == pytest.approx(expected)
 
 
-def test_concat_overlapping(tmp_path):
+def test_concat_overlapping(tmp_path: Path) -> None:
     infilename_a = "tests/testdata/tide1.dfs1"
     infilename_b = "tests/testdata/tide2.dfs1"
     fp = tmp_path / "concat.dfs1"
@@ -235,7 +239,7 @@ def test_concat_overlapping(tmp_path):
     assert len(ds.time) == 145
 
 
-def test_concat_files_gap_fail(tmp_path):
+def test_concat_files_gap_fail(tmp_path: Path) -> None:
     infilename_a = "tests/testdata/tide1.dfs1"
     infilename_b = "tests/testdata/tide4.dfs1"
     fp = tmp_path / "concat.dfs1"
@@ -243,7 +247,7 @@ def test_concat_files_gap_fail(tmp_path):
         mikeio.generic.concat([infilename_a, infilename_b], fp)
 
 
-def test_concat_three_files(tmp_path):
+def test_concat_three_files(tmp_path: Path) -> None:
     infiles = [
         "tests/testdata/tide1.dfs1",
         "tests/testdata/tide2.dfs1",
@@ -257,7 +261,7 @@ def test_concat_three_files(tmp_path):
     assert len(ds.time) == (5 * 48 + 1)
 
 
-def test_concat_keep(tmp_path):
+def test_concat_keep(tmp_path: Path) -> None:
     """
     test keep arguments of concatenation function
     """
@@ -318,8 +322,7 @@ def test_concat_keep(tmp_path):
     assert last_out, "overlap should be with last dataset"
 
 
-def test_concat_non_equidistant_dfs0(tmp_path):
-
+def test_concat_non_equidistant_dfs0(tmp_path: Path) -> None:
     # create two non-equidistant dfs0 files
     da1 = mikeio.DataArray(
         data=np.array([0.0, 0.1, 0.2]),
@@ -351,8 +354,7 @@ def test_concat_non_equidistant_dfs0(tmp_path):
     )
 
 
-def test_extract_equidistant(tmp_path):
-
+def test_extract_equidistant(tmp_path: Path) -> None:
     infile = "tests/testdata/waves.dfs2"
     fp = tmp_path / "waves_subset.dfs2"
 
@@ -372,8 +374,7 @@ def test_extract_equidistant(tmp_path):
         extract(infile, fp, start=1000)
 
 
-def test_extract_non_equidistant(tmp_path):
-
+def test_extract_non_equidistant(tmp_path: Path) -> None:
     infile = "tests/testdata/da_diagnostic.dfs0"
     fp = tmp_path / "da_diagnostic_subset.dfs0"
 
@@ -399,8 +400,7 @@ def test_extract_non_equidistant(tmp_path):
         extract(infile, fp, start=7200.0, end=1800.0)
 
 
-def test_extract_relative_time_axis(tmp_path):
-
+def test_extract_relative_time_axis(tmp_path: Path) -> None:
     infile = "tests/testdata/eq_relative.dfs0"
     fp = tmp_path / "eq_relative_subset.dfs0"
 
@@ -408,8 +408,7 @@
     extract(infile, fp, start=0, end=4)
 
 
-def test_extract_step_equidistant(tmp_path):
-
+def test_extract_step_equidistant(tmp_path: Path) -> None:
     infile = "tests/testdata/tide1.dfs1"  # 30min
     fp = tmp_path / "tide1_step12.dfs1"
 
@@ -422,8 +421,7 @@ def test_extract_step_equidistant(tmp_path):
     assert extracted.time[-1] == orig.time[-1]
 
 
-def test_extract_step_non_equidistant(tmp_path):
-
+def test_extract_step_non_equidistant(tmp_path: Path) -> None:
     infile = "tests/testdata/da_diagnostic.dfs0"
     fp = tmp_path / "da_diagnostic_step3.dfs0"
 
@@ -435,8 +433,7 @@ def test_extract_step_non_equidistant(tmp_path):
     assert extracted.time[-1] == orig.time[-3]
 
 
-def test_extract_items(tmp_path):
-
+def test_extract_items(tmp_path: Path) -> None:
     # This is a Dfsu 3d file (i.e. first item is Z coordinate)
     infile = "tests/testdata/oresund_vertical_slice.dfsu"
     fp = tmp_path / "oresund_vertical_slice_extract.dfsu"
 
@@ -472,8 +469,7 @@ def test_extract_items(tmp_path):
         extract(infile, fp, items=[0, "not_an_item"])
 
 
-def test_time_average(tmp_path):
-
+def test_time_average(tmp_path: Path) -> None:
     infilename = "tests/testdata/NorthSea_HD_and_windspeed.dfsu"
     fp = tmp_path / "NorthSea_HD_and_windspeed_avg.dfsu"
     avg_time(infilename, fp)
 
@@ -489,7 +485,7 @@ def test_time_average(tmp_path):
     assert np.allclose(org.mean(axis=0)[0].to_numpy(), averaged[0].to_numpy())
 
 
-def test_time_average_dfsu_3d(tmp_path):
+def test_time_average_dfsu_3d(tmp_path: Path) -> None:
     infilename = "tests/testdata/oresund_sigma_z.dfsu"
     fp = tmp_path / "oresund_sigma_z_avg.dfsu"
     avg_time(infilename, fp)
 
@@ -502,8 +498,7 @@ def test_time_average_dfsu_3d(tmp_path):
     assert org.n_items == averaged.n_items
 
 
-def test_time_average_deletevalues(tmp_path):
-
+def test_time_average_deletevalues(tmp_path: Path) -> None:
     infilename = "tests/testdata/gebco_sound.dfs2"
     fp = tmp_path / "gebco_sound_avg.dfs2"
     avg_time(infilename, fp)
 
@@ -520,8 +515,7 @@ def test_time_average_deletevalues(tmp_path):
     assert np.allclose(org[0].to_numpy()[~nan1], averaged[0].to_numpy()[~nan2])
 
 
-def test_quantile_dfsu(tmp_path):
-
+def test_quantile_dfsu(tmp_path: Path) -> None:
     infilename = "tests/testdata/oresundHD_run1.dfsu"
     fp = tmp_path / "oresund_q10.dfsu"
     generic.quantile(infilename, fp, q=0.1, items=["Surface elevation"])
 
@@ -532,8 +526,7 @@ def test_quantile_dfsu(tmp_path):
     assert np.allclose(org[0].to_numpy(), q10[0].to_numpy())
 
 
-def test_quantile_dfsu_buffer_size(tmp_path):
-
+def test_quantile_dfsu_buffer_size(tmp_path: Path) -> None:
     infilename = "tests/testdata/oresundHD_run1.dfsu"
     fp = tmp_path / "oresund_q10.dfsu"
     generic.quantile(infilename, fp, q=0.1, buffer_size=1e5, items=0)
 
@@ -544,8 +537,7 @@ def test_quantile_dfsu_buffer_size(tmp_path):
     assert np.allclose(org[0].to_numpy(), q10[0].to_numpy())
 
 
-def test_quantile_dfs2(tmp_path):
-
+def test_quantile_dfs2(tmp_path: Path) -> None:
     infilename = "tests/testdata/eq.dfs2"
     fp = tmp_path / "eq_q90.dfs2"
     generic.quantile(infilename, fp, q=0.9)
 
@@ -556,8 +548,7 @@ def test_quantile_dfs2(tmp_path):
     assert np.allclose(org[0].to_numpy(), q90[0].to_numpy())
 
 
-def test_quantile_dfs0(tmp_path):
-
+def test_quantile_dfs0(tmp_path: Path) -> None:
     infilename = "tests/testdata/da_diagnostic.dfs0"
     fp = tmp_path / "da_q001_q05.dfs0"
     generic.quantile(infilename, fp, q=[0.01, 0.5])
 
@@ -569,7 +560,7 @@ def test_quantile_dfs0(tmp_path):
     # assert np.allclose(org[5], qnt[5])
 
 
-def test_quantile_dfsu_3d(tmp_path):
+def test_quantile_dfsu_3d(tmp_path: Path) -> None:
     infilename = "tests/testdata/oresund_sigma_z.dfsu"
     fp = tmp_path / "oresund_sigma_z_q10_90.dfsu"
     generic.quantile(infilename, fp, q=[0.1, 0.9], items=["Temperature"])
 
@@ -578,7 +569,7 @@ def test_quantile_dfsu_3d(tmp_path):
     assert qd.n_timesteps == 1
 
 
-def test_dfs_ext_capitalisation(tmp_path):
+def test_dfs_ext_capitalisation(tmp_path: Path) -> None:
     filename = "tests/testdata/waves2.DFS0"
     ds = mikeio.open(filename)
     ds = mikeio.read(filename)
 
@@ -590,7 +581,7 @@ def test_dfs_ext_capitalisation(tmp_path):
     assert True
 
 
-def test_fill_corrupt_data(tmp_path):
+def test_fill_corrupt_data(tmp_path: Path) -> None:
     """This test doesn't verify much..."""
 
     infile = "tests/testdata/waves.dfs2"

From ccd178e1de47a837f5fb45fafe8996b6b1d49f21 Mon Sep 17 00:00:00 2001
From: Henrik Andersson
Date: Mon, 9 Sep 2024 17:31:45 +0200
Subject: [PATCH 4/5] Remove unused

---
 mikeio/generic.py     | 16 +++++-----------
 tests/test_generic.py |  7 ++++++-
 2 files changed, 11 insertions(+), 12 deletions(-)

diff --git a/mikeio/generic.py b/mikeio/generic.py
index 15bbbcce9..87fcdb0f2 100644
--- a/mikeio/generic.py
+++ b/mikeio/generic.py
@@ -90,7 +90,7 @@ def _clone(
     outfilename: str | pathlib.Path,
     start_time: datetime | None = None,
     timestep: float | None = None,
-    items: Sequence[int | str | DfsDynamicItemInfo] | None = None,
+    items: Sequence[int | DfsDynamicItemInfo] | None = None,
 ) -> DfsFile:
     """Clone a dfs file
 
@@ -104,7 +104,7 @@ def _clone(
         new start time for the new file, by default None
     timestep : float, optional
         new timestep (in seconds) for the new file, by default None
-    items : list(int,str,eum.ItemInfo), optional
+    items : list(int,eum.ItemInfo), optional
        list of items for new file, either as a list of ItemInfo or a list of str/int referring to original file,
        default: all items from original file
 
@@ -142,9 +142,6 @@ def _clone(
     for customBlock in fi.CustomBlocks:
         builder.AddCustomBlock(customBlock)
 
-    names = [x.Name for x in source.ItemInfo]
-    item_lookup = {name: i for i, name in enumerate(names)}
-
     if isinstance(items, Iterable) and not isinstance(items, str):
         for item in items:
             if isinstance(item, ItemInfo):
@@ -157,11 +154,8 @@ def _clone(
                 builder.AddDynamicItem(item)
             elif isinstance(item, int):
                 builder.AddDynamicItem(source.ItemInfo[item])
-            elif isinstance(item, str):
-                item_no = item_lookup[item]
-                builder.AddDynamicItem(source.ItemInfo[item_no])
 
-    elif isinstance(items, (int, str)) or items is None:
+    elif isinstance(items, (int)) or items is None:
         # must be str/int refering to original file (or None)
         item_numbers = _valid_item_numbers(source.ItemInfo, items)
         items = [source.ItemInfo[item] for item in item_numbers]
@@ -507,8 +501,8 @@ def concat(
 def extract(
     infilename: str | pathlib.Path,
     outfilename: str | pathlib.Path,
-    start: int = 0,
-    end: int = -1,
+    start: int | float | str | datetime = 0,
+    end: int | float | str | datetime = -1,
     step: int = 1,
     items: Sequence[int | str] | None = None,
 ) -> None:
diff --git a/tests/test_generic.py b/tests/test_generic.py
index 45d4d3bfe..ca1088779 100644
--- a/tests/test_generic.py
+++ b/tests/test_generic.py
@@ -201,7 +201,9 @@ def test_sum_dfsu(tmp_path: Path) -> None:
     infilename_a = "tests/testdata/HD2D.dfsu"
     infilename_b = "tests/testdata/HD2D.dfsu"
     fp = tmp_path / "sum.dfsu"
-    mikeio.generic.sum(infilename_a, infilename_b, fp)
+
+    with pytest.warns(FutureWarning):
+        mikeio.generic.sum(infilename_a, infilename_b, fp)
 
     org = mikeio.read(infilename_a)
 
@@ -373,6 +375,9 @@ def test_extract_equidistant(tmp_path: Path) -> None:
     with pytest.raises(ValueError):
         extract(infile, fp, start=1000)
 
+    with pytest.raises(ValueError):
+        extract(infile, fp, end=1000)
+
 
 def test_extract_non_equidistant(tmp_path: Path) -> None:
     infile = "tests/testdata/da_diagnostic.dfs0"
     fp = tmp_path / "da_diagnostic_subset.dfs0"

From a50a282cfd7b631ddab57e07c41aec5d6a7708df Mon Sep 17 00:00:00 2001
From: Henrik Andersson
Date: Fri, 22 Nov 2024 08:54:51 +0100
Subject: [PATCH 5/5] Period

---
 mikeio/generic.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/mikeio/generic.py b/mikeio/generic.py
index b5bc3c0fe..e5d5501e6 100644
--- a/mikeio/generic.py
+++ b/mikeio/generic.py
@@ -388,8 +388,7 @@ def sum(
     infilename_b: str | pathlib.Path,
     outfilename: str | pathlib.Path,
 ) -> None:
-    """Sum two dfs files (a+b)"""
-
+    """Sum two dfs files (a+b)."""
     # deprecated
     warnings.warn(FutureWarning("This function is deprecated. Use add instead."))
     _process_dfs_files(infilename_a, infilename_b, outfilename, operator.add)
@@ -400,7 +399,7 @@ def add(
     infilename_b: str | pathlib.Path,
     outfilename: str | pathlib.Path,
 ) -> None:
-    """Sum two dfs files (a+b)"""
+    """Add two dfs files (a+b)."""
     _process_dfs_files(infilename_a, infilename_b, outfilename, operator.add)
 
 
@@ -409,7 +408,7 @@ def diff(
     infilename_b: str | pathlib.Path,
     outfilename: str | pathlib.Path,
 ) -> None:
-    """Calculate difference between two dfs files (a-b)"""
+    """Calculate difference between two dfs files (a-b)."""
     _process_dfs_files(infilename_a, infilename_b, outfilename, operator.sub)
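
Usage after this series (a sketch, not part of the patches): the calls below exercise the add/diff pair and the now-deprecated sum against one of the repo's test files used in tests/test_generic.py; the output file names are made up for illustration.

    import warnings

    import mikeio.generic
    from mikeio.generic import add, diff

    infile = "tests/testdata/gebco_sound.dfs2"

    # add (introduced in patch 2) sums two dfs files element-wise (a+b)
    add(infile, infile, "doubled.dfs2")

    # diff subtracts element-wise (a-b); differencing a file with itself
    # gives zeros, while delete values are propagated unchanged
    diff(infile, infile, "zeros.dfs2")

    # generic.sum still works but emits a FutureWarning; silence it
    # explicitly if it must be called during the deprecation period
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", FutureWarning)
        mikeio.generic.sum(infile, infile, "also_doubled.dfs2")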