From 244bd45d829357ff6cf11b185d3a0c9007040b12 Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 7 Aug 2023 12:02:01 -0700 Subject: [PATCH 01/23] check for missing fov files --- src/toffy/json_utils.py | 36 ++++++++++++++++++++++++++++++++++++ tests/json_utils_test.py | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+) diff --git a/src/toffy/json_utils.py b/src/toffy/json_utils.py index f0bbd562..8b90e0dd 100644 --- a/src/toffy/json_utils.py +++ b/src/toffy/json_utils.py @@ -240,3 +240,39 @@ def check_fov_resolutions(bin_file_dir, run_name, save_path=None): resolution_data.to_csv(save_path, index=False) return resolution_data + + +def missing_fov_check(bin_file_dir, run_name): + """Use the run metadata to check if all FOV data was generated + Args: + bin_file_dir (str): directory containing the run json file + run_name (str): name of the run and corresponding run file + + Raises: + FileNotFoundError: FOV specified in the run file doesn't have associated bin/json file + """ + + # read in run file + run_file_path = os.path.join(bin_file_dir, run_name + ".json") + io_utils.validate_paths([run_file_path]) + run_metadata = read_json_file(run_file_path, encoding="utf-8") + + missing_fovs = {} + for fov in run_metadata.get("fovs", ()): + # get fov names + fov_number = fov.get("runOrder") + default_name = f"fov-{fov_number}-scan-1" + custom_name = fov.get("name") + + # add default name and custom name for missing data + if not os.path.exists( + os.path.join(bin_file_dir, default_name + ".bin") + ) or not os.path.exists(os.path.join(bin_file_dir, default_name + ".json")): + missing_fovs[default_name] = [custom_name] + + elif os.path.getsize(os.path.join(bin_file_dir, default_name + ".json")) == 0: + missing_fovs[default_name] = [custom_name] + + if missing_fovs: + missing_fovs = pd.DataFrame(missing_fovs).to_string(index=False) + raise FileNotFoundError(f"The following data was not generated:\n {missing_fovs}") diff --git a/tests/json_utils_test.py 
b/tests/json_utils_test.py index 444bc092..c98c3c4c 100644 --- a/tests/json_utils_test.py +++ b/tests/json_utils_test.py @@ -232,3 +232,40 @@ def test_check_fov_resolutions(): ) assert os.path.exists(os.path.join(temp_dir, "resolution_data.csv")) + + +def test_missing_fov_check(): + with tempfile.TemporaryDirectory() as temp_dir: + run_file_name = "test" + run_data = { + "fovs": [ + {"runOrder": 1, "name": "image_1"}, + {"runOrder": 2, "name": "image_2"}, + { + "runOrder": 3, + "name": "image_3", + }, + ] + } + json_utils.write_json_file( + os.path.join(temp_dir, run_file_name + ".json"), run_data, "utf-8" + ) + + for fov in ["fov-1-scan-1", "fov-2-scan-1", "fov-3-scan-1"]: + json_utils.write_json_file(os.path.join(temp_dir, fov + ".json"), ["test_data"]) + _make_blank_file(temp_dir, fov + ".bin") + + # test success + json_utils.missing_fov_check(temp_dir, run_file_name) + + # check missing bin file raises warning + os.remove(os.path.join(temp_dir, "fov-1-scan-1.bin")) + with pytest.raises(FileNotFoundError): + json_utils.missing_fov_check(temp_dir, run_file_name) + _make_blank_file(temp_dir, "fov-1-scan-1.bin") + + # check empty json file raises warning + os.remove(os.path.join(temp_dir, "fov-2-scan-1.json")) + _make_blank_file(temp_dir, "fov-2-scan-1.json") + with pytest.raises(FileNotFoundError): + json_utils.missing_fov_check(temp_dir, run_file_name) From f38e68dfc25963d46a1c009d87dc5d16a83c6dea Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 7 Aug 2023 12:59:51 -0700 Subject: [PATCH 02/23] watcher check --- src/toffy/watcher_callbacks.py | 8 ++++++++ tests/utils/test_utils.py | 4 ++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/toffy/watcher_callbacks.py b/src/toffy/watcher_callbacks.py index 4e12f76b..da688932 100644 --- a/src/toffy/watcher_callbacks.py +++ b/src/toffy/watcher_callbacks.py @@ -16,6 +16,7 @@ from mibi_bin_tools.type_utils import any_true from toffy.image_stitching import stitch_images +from toffy.json_utils import 
missing_fov_check from toffy.mph_comp import combine_mph_metrics, compute_mph_metrics, visualize_mph from toffy.normalize import write_mph_per_mass from toffy.qc_comp import combine_qc_metrics, compute_qc_metrics_direct @@ -117,6 +118,13 @@ def image_stitching(self, tiff_out_dir, **kwargs): stitch_images(tiff_out_dir, self.run_folder, **viz_kwargs) + def check_missing_fovs(self): + """Checks for associated bin/json files per FOV + Raises: + FileNotFound error if any fov data is missing + """ + missing_fov_check(self.run_folder, os.path.basename(self)) + @dataclass class FovCallbacks: diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index 851d6cd4..63e88eb7 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -173,7 +173,7 @@ def generate_sample_fovs_list(fov_coords, fov_names, fov_sizes): "generate_mph", "generate_pulse_heights", ) -RUN_CALLBACKS = ("plot_qc_metrics", "plot_mph_metrics", "image_stitching") +RUN_CALLBACKS = ("plot_qc_metrics", "plot_mph_metrics", "image_stitching", "check_missing_fovs") def mock_visualize_qc_metrics( @@ -558,7 +558,7 @@ def case_default(self, intensity, replace): kwargs = {"panel": panel, "intensities": intensity, "replace": replace} return ( - ["plot_qc_metrics", "plot_mph_metrics", "image_stitching"], + ["plot_qc_metrics", "plot_mph_metrics", "image_stitching", "check_missing_fovs"], None, ["extract_tiffs", "generate_pulse_heights"], kwargs, From 8a26f0f8f36d32935ffe8ad54b3745e1f7f1cbf6 Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 7 Aug 2023 15:33:22 -0700 Subject: [PATCH 03/23] kawrgs --- src/toffy/watcher_callbacks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/toffy/watcher_callbacks.py b/src/toffy/watcher_callbacks.py index da688932..a6d3c6a6 100644 --- a/src/toffy/watcher_callbacks.py +++ b/src/toffy/watcher_callbacks.py @@ -118,7 +118,7 @@ def image_stitching(self, tiff_out_dir, **kwargs): stitch_images(tiff_out_dir, self.run_folder, **viz_kwargs) - def 
check_missing_fovs(self): + def check_missing_fovs(self, **kwargs): """Checks for associated bin/json files per FOV Raises: FileNotFound error if any fov data is missing From a3d59e7d64d320c03ab2342fe052395c2b570a90 Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 7 Aug 2023 16:23:17 -0700 Subject: [PATCH 04/23] basename fix --- src/toffy/watcher_callbacks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/toffy/watcher_callbacks.py b/src/toffy/watcher_callbacks.py index a6d3c6a6..227258b5 100644 --- a/src/toffy/watcher_callbacks.py +++ b/src/toffy/watcher_callbacks.py @@ -123,7 +123,7 @@ def check_missing_fovs(self, **kwargs): Raises: FileNotFound error if any fov data is missing """ - missing_fov_check(self.run_folder, os.path.basename(self)) + missing_fov_check(self.run_folder, os.path.basename(self.run_folder)) @dataclass From e494928fd31c43dbfcda6a7000a9c0dde3315b7c Mon Sep 17 00:00:00 2001 From: csowers Date: Tue, 8 Aug 2023 13:23:01 -0700 Subject: [PATCH 05/23] warning instead of error --- src/toffy/json_utils.py | 7 +++++-- src/toffy/watcher_callbacks.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/toffy/json_utils.py b/src/toffy/json_utils.py index 8b90e0dd..57e12390 100644 --- a/src/toffy/json_utils.py +++ b/src/toffy/json_utils.py @@ -249,7 +249,7 @@ def missing_fov_check(bin_file_dir, run_name): run_name (str): name of the run and corresponding run file Raises: - FileNotFoundError: FOV specified in the run file doesn't have associated bin/json file + Warning if any FOVs specified in the run file doesn't have associated bin/json files """ # read in run file @@ -275,4 +275,7 @@ def missing_fov_check(bin_file_dir, run_name): if missing_fovs: missing_fovs = pd.DataFrame(missing_fovs).to_string(index=False) - raise FileNotFoundError(f"The following data was not generated:\n {missing_fovs}") + warnings.warn( + "The following FOVs were not processed due to missing/empty/late files: \n" + f" {missing_fovs}" + ) 
diff --git a/src/toffy/watcher_callbacks.py b/src/toffy/watcher_callbacks.py index 227258b5..1e8ad09d 100644 --- a/src/toffy/watcher_callbacks.py +++ b/src/toffy/watcher_callbacks.py @@ -121,7 +121,7 @@ def image_stitching(self, tiff_out_dir, **kwargs): def check_missing_fovs(self, **kwargs): """Checks for associated bin/json files per FOV Raises: - FileNotFound error if any fov data is missing + Warning if any fov data is missing """ missing_fov_check(self.run_folder, os.path.basename(self.run_folder)) From 622fb3bc063b2c2a507ccf20e0bc369fe20c8448 Mon Sep 17 00:00:00 2001 From: csowers Date: Tue, 8 Aug 2023 22:21:28 -0700 Subject: [PATCH 06/23] add warning watcher test --- src/toffy/json_utils.py | 2 +- tests/fov_watcher_test.py | 27 ++++++++++++++++++++++----- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/src/toffy/json_utils.py b/src/toffy/json_utils.py index 57e12390..72350d27 100644 --- a/src/toffy/json_utils.py +++ b/src/toffy/json_utils.py @@ -277,5 +277,5 @@ def missing_fov_check(bin_file_dir, run_name): missing_fovs = pd.DataFrame(missing_fovs).to_string(index=False) warnings.warn( "The following FOVs were not processed due to missing/empty/late files: \n" - f" {missing_fovs}" + f"{missing_fovs}" ) diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index fd551e3e..a47fbd8d 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -226,6 +226,7 @@ def test_watcher_run_timeout( @patch("toffy.watcher_callbacks.visualize_mph", side_effect=mock_visualize_mph) @pytest.mark.parametrize("add_blank", [False, True]) @pytest.mark.parametrize("temp_bin", [False, True]) +@pytest.mark.parametrize("missing_fov", [False, True]) @parametrize_with_cases( "run_cbs,int_cbs,fov_cbs,kwargs,validators,watcher_start_lag,existing_data", cases=WatcherCases ) @@ -241,6 +242,7 @@ def test_watcher( existing_data, add_blank, temp_bin, + missing_fov, ): try: with tempfile.TemporaryDirectory() as tmpdir: @@ -267,11 +269,22 @@ def 
test_watcher( fov_callback, run_callback, intermediate_callback = build_callbacks( run_cbs, int_cbs, fov_cbs, **kwargs ) - write_json_file( - json_path=os.path.join(run_data, "test_run.json"), - json_object=COMBINED_RUN_JSON_SPOOF, - encoding="utf-8", - ) + if missing_fov: + large_run_json_spoof = COMBINED_RUN_JSON_SPOOF.copy() + large_run_json_spoof["fovs"] = COMBINED_RUN_JSON_SPOOF["fovs"] + [ + {"runOrder": 5, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}} + ] + write_json_file( + json_path=os.path.join(run_data, "test_run.json"), + json_object=large_run_json_spoof, + encoding="utf-8", + ) + else: + write_json_file( + json_path=os.path.join(run_data, "test_run.json"), + json_object=COMBINED_RUN_JSON_SPOOF, + encoding="utf-8", + ) # if existing_data set to True, test case where a FOV has already been extracted if existing_data[0]: @@ -323,6 +336,10 @@ def test_watcher( ) if existing_data[0] and existing_data[1] == "Full": watcher_warnings.append(r"already extracted for FOV fov-2-scan-1") + if missing_fov: + watcher_warnings.append( + "The following FOVs were not processed due to missing/empty/late files:" + ) if len(watcher_warnings) > 0: with pytest.warns(UserWarning, match="|".join(watcher_warnings)): From 8dd97b73bab88b00f8e3a2dff78cf57776e6f46c Mon Sep 17 00:00:00 2001 From: csowers Date: Wed, 9 Aug 2023 17:12:07 -0700 Subject: [PATCH 07/23] warning test for json_utils --- tests/json_utils_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/json_utils_test.py b/tests/json_utils_test.py index c98c3c4c..1de3999b 100644 --- a/tests/json_utils_test.py +++ b/tests/json_utils_test.py @@ -260,12 +260,12 @@ def test_missing_fov_check(): # check missing bin file raises warning os.remove(os.path.join(temp_dir, "fov-1-scan-1.bin")) - with pytest.raises(FileNotFoundError): + with pytest.warns(): json_utils.missing_fov_check(temp_dir, run_file_name) _make_blank_file(temp_dir, "fov-1-scan-1.bin") # check empty json file 
raises warning os.remove(os.path.join(temp_dir, "fov-2-scan-1.json")) _make_blank_file(temp_dir, "fov-2-scan-1.json") - with pytest.raises(FileNotFoundError): + with pytest.warns(): json_utils.missing_fov_check(temp_dir, run_file_name) From 2f21dd4e1be14ae256821433f1d94167db6d106e Mon Sep 17 00:00:00 2001 From: csowers Date: Thu, 10 Aug 2023 12:07:29 -0700 Subject: [PATCH 08/23] skip moly --- src/toffy/json_utils.py | 6 +++++- tests/json_utils_test.py | 8 +++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/toffy/json_utils.py b/src/toffy/json_utils.py index 72350d27..81676660 100644 --- a/src/toffy/json_utils.py +++ b/src/toffy/json_utils.py @@ -259,6 +259,9 @@ def missing_fov_check(bin_file_dir, run_name): missing_fovs = {} for fov in run_metadata.get("fovs", ()): + if fov.get("StandardTarget" == "Molybdenum Foil"): + continue + # get fov names fov_number = fov.get("runOrder") default_name = f"fov-{fov_number}-scan-1" @@ -274,7 +277,8 @@ def missing_fov_check(bin_file_dir, run_name): missing_fovs[default_name] = [custom_name] if missing_fovs: - missing_fovs = pd.DataFrame(missing_fovs).to_string(index=False) + missing_fovs = pd.DataFrame(missing_fovs, index=[0]).T + missing_fovs.columns = ["fov_name"] warnings.warn( "The following FOVs were not processed due to missing/empty/late files: \n" f"{missing_fovs}" diff --git a/tests/json_utils_test.py b/tests/json_utils_test.py index 1de3999b..dc1ccec9 100644 --- a/tests/json_utils_test.py +++ b/tests/json_utils_test.py @@ -241,10 +241,8 @@ def test_missing_fov_check(): "fovs": [ {"runOrder": 1, "name": "image_1"}, {"runOrder": 2, "name": "image_2"}, - { - "runOrder": 3, - "name": "image_3", - }, + {"runOrder": 3, "name": "image_3"}, + {"runOrder": 4, "name": "image_3", "StandardTarget": "Molybdenum Foil"}, ] } json_utils.write_json_file( @@ -255,7 +253,7 @@ def test_missing_fov_check(): json_utils.write_json_file(os.path.join(temp_dir, fov + ".json"), ["test_data"]) _make_blank_file(temp_dir, fov 
+ ".bin") - # test success + # test success (ignore missing moly) json_utils.missing_fov_check(temp_dir, run_file_name) # check missing bin file raises warning From 9af3e4615cd04dbbb676a5cd0deba71b595b6028 Mon Sep 17 00:00:00 2001 From: csowers Date: Thu, 10 Aug 2023 12:08:12 -0700 Subject: [PATCH 09/23] remove watcher tests --- tests/fov_watcher_test.py | 28 ++++++---------------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index a47fbd8d..a08c7c0d 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -226,7 +226,6 @@ def test_watcher_run_timeout( @patch("toffy.watcher_callbacks.visualize_mph", side_effect=mock_visualize_mph) @pytest.mark.parametrize("add_blank", [False, True]) @pytest.mark.parametrize("temp_bin", [False, True]) -@pytest.mark.parametrize("missing_fov", [False, True]) @parametrize_with_cases( "run_cbs,int_cbs,fov_cbs,kwargs,validators,watcher_start_lag,existing_data", cases=WatcherCases ) @@ -242,7 +241,6 @@ def test_watcher( existing_data, add_blank, temp_bin, - missing_fov, ): try: with tempfile.TemporaryDirectory() as tmpdir: @@ -269,22 +267,12 @@ def test_watcher( fov_callback, run_callback, intermediate_callback = build_callbacks( run_cbs, int_cbs, fov_cbs, **kwargs ) - if missing_fov: - large_run_json_spoof = COMBINED_RUN_JSON_SPOOF.copy() - large_run_json_spoof["fovs"] = COMBINED_RUN_JSON_SPOOF["fovs"] + [ - {"runOrder": 5, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}} - ] - write_json_file( - json_path=os.path.join(run_data, "test_run.json"), - json_object=large_run_json_spoof, - encoding="utf-8", - ) - else: - write_json_file( - json_path=os.path.join(run_data, "test_run.json"), - json_object=COMBINED_RUN_JSON_SPOOF, - encoding="utf-8", - ) + + write_json_file( + json_path=os.path.join(run_data, "test_run.json"), + json_object=COMBINED_RUN_JSON_SPOOF, + encoding="utf-8", + ) # if existing_data set to True, test case where a 
FOV has already been extracted if existing_data[0]: @@ -336,10 +324,6 @@ def test_watcher( ) if existing_data[0] and existing_data[1] == "Full": watcher_warnings.append(r"already extracted for FOV fov-2-scan-1") - if missing_fov: - watcher_warnings.append( - "The following FOVs were not processed due to missing/empty/late files:" - ) if len(watcher_warnings) > 0: with pytest.warns(UserWarning, match="|".join(watcher_warnings)): From 6c0276e7c9b34d85afa019a9ef56670782eb4862 Mon Sep 17 00:00:00 2001 From: csowers Date: Thu, 10 Aug 2023 13:16:09 -0700 Subject: [PATCH 10/23] separate watcher test --- tests/fov_watcher_test.py | 52 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index 7ec9c18a..ee1362ed 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -399,3 +399,55 @@ def test_watcher( except OSError: warnings.warn("Temporary file cleanup was incomplete.") + + +def test_watcher_missing_fovs(): + with tempfile.TemporaryDirectory() as tmpdir: + # add extra fov to run file + large_run_json_spoof = COMBINED_RUN_JSON_SPOOF.copy() + large_run_json_spoof["fovs"] = COMBINED_RUN_JSON_SPOOF["fovs"] + [ + {"runOrder": 5, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}} + ] + + run_data = os.path.join(tmpdir, "test_run") + log_out = os.path.join(tmpdir, "log_output") + os.makedirs(run_data) + fov_callback, run_callback, intermediate_callback = build_callbacks( + run_callbacks=["check_missing_fovs"], + intermediate_callbacks=[], + fov_callbacks=[], + ) + + write_json_file( + json_path=os.path.join(run_data, "test_run.json"), + json_object=large_run_json_spoof, + encoding="utf-8", + ) + + # start watcher + with Pool(processes=4) as pool: + pool.apply_async( + _slow_copy_sample_tissue_data, + (run_data, SLOW_COPY_INTERVAL_S, False, False), + ) + + # should raise warning for missing fov data + with pytest.warns( + UserWarning, + match="The following FOVs 
were not processed due to missing/empty/late files:", + ): + res_scan = pool.apply_async( + start_watcher, + ( + run_data, + log_out, + fov_callback, + run_callback, + intermediate_callback, + 2700, + 1, + SLOW_COPY_INTERVAL_S, + ), + ) + + res_scan.get() From 76933ec5c81e24022b279d945b1c001ab80e304f Mon Sep 17 00:00:00 2001 From: csowers Date: Fri, 11 Aug 2023 11:45:15 -0700 Subject: [PATCH 11/23] test on existing data --- tests/fov_watcher_test.py | 42 +++++++++++++++------------------------ 1 file changed, 16 insertions(+), 26 deletions(-) diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index ee1362ed..b5d660fa 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -410,6 +410,8 @@ def test_watcher_missing_fovs(): ] run_data = os.path.join(tmpdir, "test_run") + for file in os.listdir(COMBINED_DATA_PATH): + shutil.copy(os.path.join(COMBINED_DATA_PATH, file), os.path.join(run_data, file)) log_out = os.path.join(tmpdir, "log_output") os.makedirs(run_data) fov_callback, run_callback, intermediate_callback = build_callbacks( @@ -424,30 +426,18 @@ def test_watcher_missing_fovs(): encoding="utf-8", ) - # start watcher - with Pool(processes=4) as pool: - pool.apply_async( - _slow_copy_sample_tissue_data, - (run_data, SLOW_COPY_INTERVAL_S, False, False), + # watcher should raise warning for missing fov data + with pytest.warns( + UserWarning, + match="The following FOVs were not processed due to missing/empty/late files:", + ): + res_scan = start_watcher( + run_data, + log_out, + fov_callback, + run_callback, + intermediate_callback, + 2700, + 1, + SLOW_COPY_INTERVAL_S, ) - - # should raise warning for missing fov data - with pytest.warns( - UserWarning, - match="The following FOVs were not processed due to missing/empty/late files:", - ): - res_scan = pool.apply_async( - start_watcher, - ( - run_data, - log_out, - fov_callback, - run_callback, - intermediate_callback, - 2700, - 1, - SLOW_COPY_INTERVAL_S, - ), - ) - - res_scan.get() 
From 7e62a67887bca7724baaf56dd9e525eed1235380 Mon Sep 17 00:00:00 2001 From: csowers Date: Fri, 11 Aug 2023 12:17:32 -0700 Subject: [PATCH 12/23] watcher notebook --- templates/3a_monitor_MIBI_run.ipynb | 8 +++++--- tests/fov_watcher_test.py | 7 ++----- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/templates/3a_monitor_MIBI_run.ipynb b/templates/3a_monitor_MIBI_run.ipynb index b04af332..3b736e0d 100644 --- a/templates/3a_monitor_MIBI_run.ipynb +++ b/templates/3a_monitor_MIBI_run.ipynb @@ -93,7 +93,9 @@ "\n", "* The `plot_mph_metrics` run callback will compute the median pulse height data for each FOV, and plot the results once the run has completed. Additional arguments are: `regression` which when set to True will also plot the linear regression line for the data.
(See [3d_compute_median_pulse_height](./3d_compute_median_pulse_height.ipynb) for more details.)\n", "\n", - "* The `image_stitching` run callback will create a single image, which stitched together all FOV images for a specific channel. Additional arguments are: `channels`.
(See [3e_stitch_images](./3e_stitch_images.ipynb) for more details.)" + "* The `image_stitching` run callback will create a single image, which stitched together all FOV images for a specific channel. Additional arguments are: `channels`.
(See [3e_stitch_images](./3e_stitch_images.ipynb) for more details.)\n", + "\n", + "* The `check_missing_fovs` run callback checks that the run produces the appropriate .bin and .json all files for all FOVs included in the run file." ] }, { @@ -103,7 +105,7 @@ "outputs": [], "source": [ "fov_callback, run_callback, intermediate_callback = build_callbacks(\n", - " run_callbacks = ['image_stitching'],\n", + " run_callbacks = ['image_stitching', 'check_missing_fovs'],\n", " intermediate_callbacks = ['plot_qc_metrics', 'plot_mph_metrics'],\n", " fov_callbacks = ['extract_tiffs', 'generate_pulse_heights'],\n", " tiff_out_dir=extraction_dir,\n", @@ -143,7 +145,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.11.4" }, "vscode": { "interpreter": { diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index b5d660fa..62712a37 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -410,7 +410,7 @@ def test_watcher_missing_fovs(): ] run_data = os.path.join(tmpdir, "test_run") - for file in os.listdir(COMBINED_DATA_PATH): + for file in io_utils.list_files(COMBINED_DATA_PATH, substrs=[".bin", ".json"]): shutil.copy(os.path.join(COMBINED_DATA_PATH, file), os.path.join(run_data, file)) log_out = os.path.join(tmpdir, "log_output") os.makedirs(run_data) @@ -431,13 +431,10 @@ def test_watcher_missing_fovs(): UserWarning, match="The following FOVs were not processed due to missing/empty/late files:", ): - res_scan = start_watcher( + start_watcher( run_data, log_out, fov_callback, run_callback, intermediate_callback, - 2700, - 1, - SLOW_COPY_INTERVAL_S, ) From 480dd2638093fba4dfa7e927f71d1bd24f3b31c9 Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 14 Aug 2023 11:57:38 -0700 Subject: [PATCH 13/23] moly fix --- src/toffy/json_utils.py | 2 +- tests/fov_watcher_test.py | 2 +- tests/json_utils_test.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/src/toffy/json_utils.py b/src/toffy/json_utils.py index 81676660..dab99f5c 100644 --- a/src/toffy/json_utils.py +++ b/src/toffy/json_utils.py @@ -259,7 +259,7 @@ def missing_fov_check(bin_file_dir, run_name): missing_fovs = {} for fov in run_metadata.get("fovs", ()): - if fov.get("StandardTarget" == "Molybdenum Foil"): + if fov.get("standardTarget") == "Molybdenum Foil": continue # get fov names diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index 62712a37..21e7434f 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -410,10 +410,10 @@ def test_watcher_missing_fovs(): ] run_data = os.path.join(tmpdir, "test_run") + os.makedirs(run_data) for file in io_utils.list_files(COMBINED_DATA_PATH, substrs=[".bin", ".json"]): shutil.copy(os.path.join(COMBINED_DATA_PATH, file), os.path.join(run_data, file)) log_out = os.path.join(tmpdir, "log_output") - os.makedirs(run_data) fov_callback, run_callback, intermediate_callback = build_callbacks( run_callbacks=["check_missing_fovs"], intermediate_callbacks=[], diff --git a/tests/json_utils_test.py b/tests/json_utils_test.py index dc1ccec9..7b2ce0fc 100644 --- a/tests/json_utils_test.py +++ b/tests/json_utils_test.py @@ -242,7 +242,7 @@ def test_missing_fov_check(): {"runOrder": 1, "name": "image_1"}, {"runOrder": 2, "name": "image_2"}, {"runOrder": 3, "name": "image_3"}, - {"runOrder": 4, "name": "image_3", "StandardTarget": "Molybdenum Foil"}, + {"runOrder": 4, "name": "image_3", "standardTarget": "Molybdenum Foil"}, ] } json_utils.write_json_file( From 9fdeaf53d3fddf57ab6b8446172bb107d84dd4bb Mon Sep 17 00:00:00 2001 From: csowers Date: Wed, 23 Aug 2023 11:36:30 -0700 Subject: [PATCH 14/23] Au in panel --- data/sample_panel.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/sample_panel.csv b/data/sample_panel.csv index 62a653d1..91480cce 100644 --- a/data/sample_panel.csv +++ b/data/sample_panel.csv @@ -1,5 +1,5 @@ ,Mass,Target,Start,Stop 
-0,0.6166032817280968,Calprotectin,0.31660328172809676,0.6166032817280968 +0,0.6166032817280968,Au,0.31660328172809676,0.6166032817280968 1,0.7361271393405203,Chymase,0.43612713934052033,0.7361271393405203 2,0.3882306801433095,SMA,0.08823068014330954,0.3882306801433095 3,0.21260791446689753,Vimentin,-0.08739208553310246,0.21260791446689753 From e41170aa3d790fe016ce5be0539659537c952ea9 Mon Sep 17 00:00:00 2001 From: csowers Date: Wed, 23 Aug 2023 12:15:31 -0700 Subject: [PATCH 15/23] fix merge fail --- pyproject.toml | 2 +- src/toffy/bin_extraction.py | 61 +++++++++++++++++++++- src/toffy/normalize.py | 12 ++++- src/toffy/panel_utils.py | 32 ++++++++++++ templates/3b_extract_images_from_bin.ipynb | 44 +++++++++++++--- templates/4b_normalize_image_data.ipynb | 2 + tests/bin_extraction_test.py | 50 +++++++++++++++++- tests/fov_watcher_test.py | 32 ++++++++++-- tests/normalize_test.py | 27 ++++++++-- tests/panel_utils_test.py | 31 +++++++++++ tests/utils/normalize_test_cases.py | 13 +++-- tests/watcher_callbacks_test.py | 14 ++++- 12 files changed, 294 insertions(+), 26 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5a15e8ff..a7d35144 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ metadata = false [tool.poetry.dependencies] python = ">=3.9,<3.12" alpineer = ">=0.1.9" -mibi-bin-tools = "0.2.9" +mibi-bin-tools = "0.2.10" ipywidgets = "^8" numpy = "1.*" natsort = "^8" diff --git a/src/toffy/bin_extraction.py b/src/toffy/bin_extraction.py index 3dbd400c..1532512c 100644 --- a/src/toffy/bin_extraction.py +++ b/src/toffy/bin_extraction.py @@ -1,10 +1,14 @@ +import os +import re import warnings import natsort as ns -from alpineer import io_utils +import numpy as np +import pandas as pd +from alpineer import io_utils, load_utils from mibi_bin_tools import bin_files -from toffy.json_utils import check_for_empty_files, list_moly_fovs +from toffy.json_utils import check_for_empty_files, list_moly_fovs, read_json_file def extract_missing_fovs( 
@@ -71,3 +75,56 @@ def extract_missing_fovs( print("Extraction completed!") else: warnings.warn(f"No viable bin files were found in {bin_file_dir}", Warning) + + +def incomplete_fov_check( + bin_file_dir, extraction_dir, num_rows=10, num_channels=5, signal_percent=0.02 +): + """Read in the supplied number tiff files for each FOV to check for incomplete images + Args: + bin_file_dir (str): directory containing the run json file + extraction_dir (str): directory containing the extracted tifs + num_rows (int): number of bottom rows of the images to check for zero values + num_channels (int): number of channel images to check per FOV + signal_percent (float): min amount of non-zero signal required for complete FOVs + Raises: + Warning if any FOVs have only partially generated images + """ + + io_utils.validate_paths([bin_file_dir, extraction_dir]) + + # read in json file to get custom fov names + run_name = os.path.basename(bin_file_dir) + run_file_path = os.path.join(bin_file_dir, run_name + ".json") + run_metadata = read_json_file(run_file_path, encoding="utf-8") + + # get fov and channel info + fovs = io_utils.list_folders(extraction_dir, "fov") + channels = io_utils.list_files(os.path.join(extraction_dir, fovs[0]), ".tiff") + channels_subset = channels[:num_channels] + if "Au.tiff" not in channels_subset: + channels_subset = channels_subset[:-1] + ["Au.tiff"] + + incomplete_fovs = {} + for fov in fovs: + # load in channel images + img_data = load_utils.load_imgs_from_tree( + extraction_dir, fovs=[fov], channels=channels_subset + ) + row_index = img_data.shape[1] - num_rows + img_bottoms = img_data[0, row_index:, :, :] + + # check percentage of non-zero pixels in the bottom of the image + total_pixels = img_data.shape[1] * num_rows * num_channels + if np.count_nonzero(img_bottoms) / total_pixels < signal_percent: + i = re.findall(r"\d+", fov)[0] + custom_name = run_metadata["fovs"][int(i) - 1]["name"] + incomplete_fovs[fov] = custom_name + + if incomplete_fovs: + 
incomplete_fovs = pd.DataFrame(incomplete_fovs, index=[0]).T + incomplete_fovs.columns = ["fov_name"] + warnings.warn( + "\nThe following FOVs were only partially generated and need to be re-ran: \n" + f"{incomplete_fovs}" + ) diff --git a/src/toffy/normalize.py b/src/toffy/normalize.py index 2921d116..1a1c2c23 100644 --- a/src/toffy/normalize.py +++ b/src/toffy/normalize.py @@ -47,7 +47,9 @@ def write_counts_per_mass(base_dir, output_dir, fov, masses, start_offset=0.5, s out_df.to_csv(os.path.join(output_dir, fov + "_channel_counts.csv"), index=False) -def write_mph_per_mass(base_dir, output_dir, fov, masses, start_offset=0.5, stop_offset=0.5): +def write_mph_per_mass( + base_dir, output_dir, fov, masses, start_offset=0.5, stop_offset=0.5, proficient=False +): """Records the median pulse height (MPH) per mass for the specified FOV Args: @@ -57,6 +59,7 @@ def write_mph_per_mass(base_dir, output_dir, fov, masses, start_offset=0.5, stop masses (list): the list of masses to extract MPH from start_offset (float): beginning value for calculating mph values stop_offset (float): ending value for calculating mph values + proficient (bool): whether proficient MPH data is written or not """ # hold computed values mph_vals = [] @@ -72,7 +75,10 @@ def write_mph_per_mass(base_dir, output_dir, fov, masses, start_offset=0.5, stop # create df to hold output fovs = np.repeat(fov, len(masses)) out_df = pd.DataFrame({"mass": masses, "fov": fovs, "pulse_height": mph_vals}) - out_df.to_csv(os.path.join(output_dir, fov + "_pulse_heights.csv"), index=False) + pulse_heights_file = ( + fov + "_pulse_heights_proficient.csv" if proficient else fov + "_pulse_heights.csv" + ) + out_df.to_csv(os.path.join(output_dir, pulse_heights_file), index=False) def create_objective_function(obj_func): @@ -222,7 +228,9 @@ def combine_run_metrics(run_dir, substring, warn_overwrite=True): substring(str): the substring contained within the files to be combined warn_overwrite (bool): whether to warn if 
existing `_combined.csv` file found""" + # retrieve all pulse height files, but ignore anything mass proficient files = io_utils.list_files(run_dir, substring) + files = [file for file in files if "_proficient" not in file] # validate inputs if len(files) == 0: diff --git a/src/toffy/panel_utils.py b/src/toffy/panel_utils.py index a08728f0..178e9023 100644 --- a/src/toffy/panel_utils.py +++ b/src/toffy/panel_utils.py @@ -105,6 +105,38 @@ ) +def modify_panel_ranges(panel: pd.DataFrame, start_offset: float = 0, stop_offset: float = 0): + """Adjust the offsets of a given panel. + Only applicable for masses with ranges separated by 0.3 between 'Stop' and 'Start'. + Args: + panel (pd.DataFrame): panel dataframe with columns Mass, Target, Start, and Stop. + start_offset (float): the value to add to the `'Start'` column. + stop_offset (float): the value to add to the `'Stop'` column. + Returns: + pd.DataFrame: + Updated panel with `start_offset` added to `'Start`' column, + likewise for `stop_offset` and `'Stop'` column. 
+ """ + panel_new = panel.copy() + + # extract only rows where 'Start' - 'End' = -0.3, round to account for floating point error + panel_rows_modify = panel_new[ + (panel_new["Start"] - panel_new["Stop"]).round(1) == -0.3 + ].index.values + + # add start_offset to 'Start' column + panel_new.loc[panel_rows_modify, "Start"] = ( + panel_new.loc[panel_rows_modify, "Start"].copy() + start_offset + ) + + # add stop_offset to 'Stop' column + panel_new.loc[panel_rows_modify, "Stop"] = ( + panel_new.loc[panel_rows_modify, "Stop"].copy() + stop_offset + ) + + return panel_new + + def merge_duplicate_masses(panel): """Check a panel df for duplicate mass values and return a unique mass panel with the target names combined diff --git a/templates/3b_extract_images_from_bin.ipynb b/templates/3b_extract_images_from_bin.ipynb index abeda208..e0f2a452 100644 --- a/templates/3b_extract_images_from_bin.ipynb +++ b/templates/3b_extract_images_from_bin.ipynb @@ -24,7 +24,7 @@ "import os\n", "\n", "from toffy.panel_utils import load_panel\n", - "from toffy.bin_extraction import extract_missing_fovs" + "from toffy.bin_extraction import extract_missing_fovs, incomplete_fov_check" ] }, { @@ -35,7 +35,8 @@ "## Required variables\n", "You will need to define the following two arguments for this notebook.\n", " - `run_name` should contain the exact name of the MIBI run to extract from\n", - " - `panel_path` should point to a panel csv specifying the targets on your panel. You can download your panel online from the Ionpath MibiTracker and then copy the file to the `C:\\\\Users\\\\Customer.ION\\\\Documents\\panel_files` directory (see [panel format](https://github.com/angelolab/toffy#panel-format) for more information)" + " - `panel_path` should point to a panel csv specifying the targets on your panel. 
You can download your panel online from the Ionpath MibiTracker and then copy the file to the `C:\\\\Users\\\\Customer.ION\\\\Documents\\panel_files` directory (see [panel format](https://github.com/angelolab/toffy#panel-format) for more information)\n", + " - `extract_prof` indicates whether you want to include mass proficient extraction on top of the default (mass deficient) extraction" ] }, { @@ -49,7 +50,8 @@ "source": [ "# set up args for current run\n", "run_name = 'YYYY-MM-DD_run_name'\n", - "panel_path = 'C:\\\\Users\\\\Customer.ION\\\\Documents\\\\panel_files\\\\my_cool_panel.csv'" + "panel_path = 'C:\\\\Users\\\\Customer.ION\\\\Documents\\\\panel_files\\\\my_cool_panel.csv'\n", + "extract_prof = True" ] }, { @@ -78,7 +80,13 @@ "# path to directory containing extracted files\n", "extraction_dir = os.path.join('D:\\\\Extracted_Images', run_name) \n", "if not os.path.exists(extraction_dir):\n", - " os.makedirs(extraction_dir)" + " os.makedirs(extraction_dir)\n", + "\n", + "if extract_prof:\n", + " # path to directory containing mass-proficient extracted files (for long-term storage)\n", + " extraction_prof_dir = os.path.join('D:\\\\Extracted_Images', run_name + '_proficient')\n", + " if not os.path.exists(extraction_prof_dir):\n", + " os.makedirs(extraction_prof_dir)" ] }, { @@ -97,7 +105,31 @@ "metadata": {}, "outputs": [], "source": [ - "extract_missing_fovs(base_dir, extraction_dir, panel)" + "# base deficient extraction\n", + "extract_missing_fovs(base_dir, extraction_dir, panel)\n", + "\n", + "# mass proficient extraction (for long-term storage)\n", + "if extract_prof:\n", + " extract_missing_fovs(base_dir, extraction_prof_dir, modify_panel_ranges(panel, start_offset=0.3, stop_offset=0.3))" + ] + }, + { + "cell_type": "markdown", + "id": "4f366be5-5ee7-4ecf-8db1-397fd7f6c0b9", + "metadata": {}, + "source": [ + "## Check for any incomplete FOVs \n", + "If the instrument is shut off part way through a run, this can result in output FOVs which are 
generated, but missing counts in parts of the images. The cell below will check for any incompete FOVs and warn you if any exist." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ba3af2f-d926-44cd-b688-5dab7e45513c", + "metadata": {}, + "outputs": [], + "source": [ + "incomplete_fov_check(bin_file_dir, extraction_dir)" ] } ], @@ -117,7 +149,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.16" + "version": "3.9.16" } }, "nbformat": 4, diff --git a/templates/4b_normalize_image_data.ipynb b/templates/4b_normalize_image_data.ipynb index 2bfe9ec0..0f6798fa 100644 --- a/templates/4b_normalize_image_data.ipynb +++ b/templates/4b_normalize_image_data.ipynb @@ -137,6 +137,8 @@ "for fov in fovs:\n", " # generate mph values\n", " mph_file_path = os.path.join(mph_run_dir, fov + '_pulse_heights.csv')\n", + " \n", + " # base MPH normalization\n", " if not os.path.exists(mph_file_path):\n", " normalize.write_mph_per_mass(base_dir=os.path.join(bin_base_dir, run_name), output_dir=mph_run_dir, \n", " fov=fov, masses=panel['Mass'].values, start_offset=0.3, stop_offset=0)" diff --git a/tests/bin_extraction_test.py b/tests/bin_extraction_test.py index 72ae9777..d58e809b 100644 --- a/tests/bin_extraction_test.py +++ b/tests/bin_extraction_test.py @@ -1,14 +1,16 @@ import os import shutil import tempfile +import warnings from pathlib import Path from unittest.mock import call, patch import natsort as ns import pandas as pd import pytest -from alpineer import io_utils, test_utils +from alpineer import image_utils, io_utils, load_utils, test_utils +from tests.utils.test_utils import make_run_file from toffy import bin_extraction @@ -130,3 +132,49 @@ def test_extract_missing_fovs(mocked_print): bin_extraction.extract_missing_fovs( bin_file_dir, extraction_dir, panel, extract_intensities=False ) + + +def test_incomplete_fov_check(): + with tempfile.TemporaryDirectory() as tmpdir: + bin_dir = make_run_file(tmpdir, 
prefixes=[""]) + extraction_dir = os.path.join(tmpdir, "extracted_images", "test_run") + test_utils._write_tifs( + extraction_dir, + ["fov-1-scan-1", "fov-2-scan-1", "fov-4-scan-1"], + ["Au", "chan2"], + (20, 20), + None, + False, + "uint32", + ) + + # test no partial FOVs, no warning + bin_extraction.incomplete_fov_check(bin_dir, extraction_dir, num_rows=10) + + # change fov-2 to have zero values in the bottom of image + fov2_data = load_utils.load_imgs_from_tree(extraction_dir, fovs=["fov-2-scan-1"]) + fov2_data[:, 10:, :, 0] = 0 + image_utils.save_image( + os.path.join(extraction_dir, "fov-2-scan-1", f"Au.tiff"), + fov2_data.loc["fov-2-scan-1", :, :, "Au"], + ) + + # test warning for partial fovs (checking 1 channel img) + with pytest.warns(): + bin_extraction.incomplete_fov_check( + bin_dir, extraction_dir, num_rows=10, num_channels=1 + ) + + # test that increasing the number of rows to check causes no warning + with warnings.catch_warnings(): + warnings.simplefilter("error") + bin_extraction.incomplete_fov_check( + bin_dir, extraction_dir, num_rows=20, num_channels=1 + ) + + # test that increasing the number of channels to check causes no warning + with warnings.catch_warnings(): + warnings.simplefilter("error") + bin_extraction.incomplete_fov_check( + bin_dir, extraction_dir, num_rows=10, num_channels=2 + ) diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index 21e7434f..9d040258 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -89,15 +89,31 @@ def _slow_copy_sample_tissue_data( COMBINED_RUN_JSON_SPOOF = { "fovs": [ - {"runOrder": 1, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}}, - {"runOrder": 2, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}}, + { + "runOrder": 1, + "scanCount": 1, + "name": "R1C1", + "frameSizePixels": {"width": 32, "height": 32}, + }, + { + "runOrder": 2, + "scanCount": 1, + "name": "R2C1", + "frameSizePixels": {"width": 32, "height": 32}, + }, { "runOrder": 3, 
"scanCount": 1, + "name": "R1C2", "frameSizePixels": {"width": 32, "height": 32}, "standardTarget": "Molybdenum Foil", }, - {"runOrder": 4, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}}, + { + "runOrder": 4, + "scanCount": 1, + "name": "R2C2", + "frameSizePixels": {"width": 32, "height": 32}, + }, ], } @@ -242,6 +258,7 @@ def test_watcher( add_blank, temp_bin, ): + print("The watcher start lag is: %d" % watcher_start_lag) try: with tempfile.TemporaryDirectory() as tmpdir: tiff_out_dir = os.path.join(tmpdir, "cb_0", RUN_DIR_NAME) @@ -267,7 +284,6 @@ def test_watcher( fov_callback, run_callback, intermediate_callback = build_callbacks( run_cbs, int_cbs, fov_cbs, **kwargs ) - write_json_file( json_path=os.path.join(run_data, "test_run.json"), json_object=COMBINED_RUN_JSON_SPOOF, @@ -379,6 +395,14 @@ def test_watcher( # extract tiffs check validators[0](os.path.join(tmpdir, "cb_0", RUN_DIR_NAME), fovs, bad_fovs) + if kwargs["extract_prof"]: + validators[0]( + os.path.join(tmpdir, "cb_0", RUN_DIR_NAME + "_proficient"), fovs, bad_fovs + ) + else: + assert not os.path.exists( + os.path.join(tmpdir, "cb_0", RUN_DIR_NAME) + "_proficient" + ) # qc check validators[1](os.path.join(tmpdir, "cb_1", RUN_DIR_NAME), fovs, bad_fovs) diff --git a/tests/normalize_test.py b/tests/normalize_test.py index 4a163284..9d98966c 100644 --- a/tests/normalize_test.py +++ b/tests/normalize_test.py @@ -83,6 +83,11 @@ def test_write_mph_per_mass(mocker): assert set(output["mass"].values) == set(masses) assert np.all(output["pulse_height"].values == output["mass"].values * 2) + normalize.write_mph_per_mass( + base_dir=temp_dir, output_dir=out_dir, fov="fov1", masses=masses, proficient=True + ) + assert os.path.exists(os.path.join(out_dir, "fov1_pulse_heights_proficient.csv")) + @parametrize( "obj_func_name, num_params", @@ -152,16 +157,20 @@ def test_create_prediction_function(obj_func, num_params): @parametrize("warn_overwrite_test", [True, False]) def 
test_combine_run_metrics(metrics, warn_overwrite_test): with tempfile.TemporaryDirectory() as temp_dir: - for metric in metrics: + for metric in metrics["deficient"]: name, values_df = metric[0], pd.DataFrame(metric[1]) values_df.to_csv(os.path.join(temp_dir, name), index=False) + for metric in metrics["proficient"]: + name_prof, values_df_prof = metric[0], pd.DataFrame(metric[1]) + values_df_prof.to_csv(os.path.join(temp_dir, name_prof), index=False) + normalize.combine_run_metrics(temp_dir, "pulse_height") combined_data = pd.read_csv(os.path.join(temp_dir, "pulse_height_combined.csv")) assert np.array_equal(combined_data.columns, ["pulse_height", "mass", "fov"]) - assert len(combined_data) == len(metrics) * 10 + assert len(combined_data) == len(metrics["deficient"]) * 10 # check that previously generated combined file is removed with warning # NOTE: only if warn_overwrite turned on @@ -172,7 +181,7 @@ def test_combine_run_metrics(metrics, warn_overwrite_test): normalize.combine_run_metrics(temp_dir, "pulse_height", warn_overwrite_test) # check that files with different lengths raises error - name, bad_vals = metrics[0][0], pd.DataFrame(metrics[0][1]) + name, bad_vals = metrics["deficient"][0][0], pd.DataFrame(metrics["deficient"][0][1]) bad_vals = bad_vals.loc[0:5, :] bad_vals.to_csv(os.path.join(temp_dir, name), index=False) @@ -491,7 +500,7 @@ def test_create_fitted_pulse_heights_file(tmpdir, test_zeros, metrics): # create metric files pulse_dir = os.path.join(tmpdir, "pulse_heights") os.makedirs(pulse_dir) - for metric in metrics: + for metric in metrics["deficient"]: name, values_df = metric[0], pd.DataFrame(metric[1]) # if test_zeros, set first mass pulse height to 0 for every FOV @@ -500,6 +509,10 @@ def test_create_fitted_pulse_heights_file(tmpdir, test_zeros, metrics): values_df.to_csv(os.path.join(pulse_dir, name), index=False) + for metric in metrics["proficient"]: + name_prof, values_df_prof = metric[0], pd.DataFrame(metric[1]) + 
values_df_prof.to_csv(os.path.join(pulse_dir, name_prof), index=False) + panel = test_cases.panel fovs = natsort.natsorted(test_cases.fovs) @@ -633,10 +646,14 @@ def test_normalize_image_data(tmpdir, metrics): pulse_height_dir = os.path.join(tmpdir, "pulse_height_dir") os.makedirs(pulse_height_dir) - for metric in metrics: + for metric in metrics["deficient"]: name, values_df = metric[0], pd.DataFrame(metric[1]) values_df.to_csv(os.path.join(pulse_height_dir, name), index=False) + for metric in metrics["proficient"]: + name_prof, values_df_prof = metric[0], pd.DataFrame(metric[1]) + values_df_prof.to_csv(os.path.join(pulse_height_dir, name_prof), index=False) + # create directory with image data img_dir = os.path.join(tmpdir, "img_dir") os.makedirs(img_dir) diff --git a/tests/panel_utils_test.py b/tests/panel_utils_test.py index 62cffc6a..99adc658 100644 --- a/tests/panel_utils_test.py +++ b/tests/panel_utils_test.py @@ -9,6 +9,37 @@ from toffy import panel_utils +def test_modify_panel_ranges(): + # only Calprotectin's range should be adjusted + toffy_panel = pd.DataFrame( + { + "Mass": [69, 71, 89], + "Target": ["Calprotectin", "Chymase", "Mast Cell Tryptase"], + "Start": [68.7, 70.6, 89.3], + "Stop": [69, 71, 89], + } + ) + + toffy_panel_pos_offset = panel_utils.modify_panel_ranges( + toffy_panel, start_offset=0.3, stop_offset=0.3 + ) + + assert list(toffy_panel_pos_offset["Start"]) == [69, 70.6, 89.3] + assert list(toffy_panel_pos_offset["Stop"]) == [69.3, 71, 89] + + toffy_panel_neg_offset = panel_utils.modify_panel_ranges( + toffy_panel, start_offset=-0.3, stop_offset=-0.3 + ) + + assert list(toffy_panel_neg_offset["Start"]) == [68.4, 70.6, 89.3] + assert list(toffy_panel_neg_offset["Stop"]) == [68.7, 71, 89] + + toffy_panel_one_offset = panel_utils.modify_panel_ranges(toffy_panel, stop_offset=-0.3) + + assert list(toffy_panel_one_offset["Start"]) == [68.7, 70.6, 89.3] + assert list(toffy_panel_one_offset["Stop"]) == [68.7, 71, 89] + + def 
test_merge_duplicate_masses(): duplicate_panel = pd.DataFrame( { diff --git a/tests/utils/normalize_test_cases.py b/tests/utils/normalize_test_cases.py index 9e239b11..c028fc3d 100644 --- a/tests/utils/normalize_test_cases.py +++ b/tests/utils/normalize_test_cases.py @@ -96,14 +96,21 @@ def case_low_count_warn(self): class CombineRunMetricFiles: def case_default_metrics(self): - # create full directory of files + # create full directory of files, include proficient data which should be ignored metrics = [] + metrics_prof = [] for i in range(0, 4): metric_name = "pulse_heights_{}.csv".format(i) + metric_prof_name = "pulse_heights_{}_proficient.csv".format(i) metric_values = { "pulse_height": np.random.rand(10), "mass": masses, "fov": [fovs[i]] * 10, } - metrics.append([metric_name, metric_values]) - return metrics + metric_prof_values = { + "pulse_height": np.random.rand(10), + "mass": masses, + "fov": [fovs[i]] * 10, + } + metrics_prof.append([metric_prof_name, metric_prof_values]) + return {"deficient": metrics, "proficient": metrics_prof} diff --git a/tests/watcher_callbacks_test.py b/tests/watcher_callbacks_test.py index ff4713f0..2fd9c30a 100644 --- a/tests/watcher_callbacks_test.py +++ b/tests/watcher_callbacks_test.py @@ -23,8 +23,18 @@ COMBINED_RUN_JSON_SPOOF = { "fovs": [ - {"runOrder": 1, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}}, - {"runOrder": 2, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}}, + { + "runOrder": 1, + "scanCount": 1, + "name": "R1C1", + "frameSizePixels": {"width": 32, "height": 32}, + }, + { + "runOrder": 2, + "scanCount": 1, + "name": "R1C2", + "frameSizePixels": {"width": 32, "height": 32}, + }, ], } From c5ca41415cd0af293b9c4560be9dcee8833700cf Mon Sep 17 00:00:00 2001 From: csowers Date: Wed, 23 Aug 2023 12:20:13 -0700 Subject: [PATCH 16/23] fix merge fail 2.0 --- src/toffy/bin_extraction.py | 1 + src/toffy/panel_utils.py | 3 +++ tests/utils/normalize_test_cases.py | 1 + 
tests/watcher_callbacks_test.py | 4 ++++ 4 files changed, 9 insertions(+) diff --git a/src/toffy/bin_extraction.py b/src/toffy/bin_extraction.py index 1532512c..4de77d24 100644 --- a/src/toffy/bin_extraction.py +++ b/src/toffy/bin_extraction.py @@ -87,6 +87,7 @@ def incomplete_fov_check( num_rows (int): number of bottom rows of the images to check for zero values num_channels (int): number of channel images to check per FOV signal_percent (float): min amount of non-zero signal required for complete FOVs + Raises: Warning if any FOVs have only partially generated images """ diff --git a/src/toffy/panel_utils.py b/src/toffy/panel_utils.py index 178e9023..9abb3d3e 100644 --- a/src/toffy/panel_utils.py +++ b/src/toffy/panel_utils.py @@ -107,11 +107,14 @@ def modify_panel_ranges(panel: pd.DataFrame, start_offset: float = 0, stop_offset: float = 0): """Adjust the offsets of a given panel. + Only applicable for masses with ranges separated by 0.3 between 'Stop' and 'Start'. + Args: panel (pd.DataFrame): panel dataframe with columns Mass, Target, Start, and Stop. start_offset (float): the value to add to the `'Start'` column. stop_offset (float): the value to add to the `'Stop'` column. 
+ Returns: pd.DataFrame: Updated panel with `start_offset` added to `'Start`' column, diff --git a/tests/utils/normalize_test_cases.py b/tests/utils/normalize_test_cases.py index c028fc3d..ed034973 100644 --- a/tests/utils/normalize_test_cases.py +++ b/tests/utils/normalize_test_cases.py @@ -112,5 +112,6 @@ def case_default_metrics(self): "mass": masses, "fov": [fovs[i]] * 10, } + metrics.append([metric_name, metric_values]) metrics_prof.append([metric_prof_name, metric_prof_values]) return {"deficient": metrics, "proficient": metrics_prof} diff --git a/tests/watcher_callbacks_test.py b/tests/watcher_callbacks_test.py index 2fd9c30a..b6265746 100644 --- a/tests/watcher_callbacks_test.py +++ b/tests/watcher_callbacks_test.py @@ -140,6 +140,10 @@ def test_build_callbacks(viz_mock, run_callbacks, inter_callbacks, kwargs, data_ icb(run_data) check_extraction_dir_structure(extracted_dir, point_names, [], ["SMA"]) + if kwargs["extract_prof"]: + check_extraction_dir_structure(extracted_dir + "_proficient", point_names, [], ["SMA"]) + else: + assert not os.path.exists(extracted_dir + "_proficient") check_qc_dir_structure(qc_dir, point_names, [], "save_dir" in kwargs) check_mph_dir_structure(qc_dir, plot_dir, point_names, [], combined=True) check_stitched_dir_structure(stitched_dir, ["SMA"]) From 058d4ab5ecae63acecf70a459c7c506861127cb3 Mon Sep 17 00:00:00 2001 From: csowers Date: Wed, 23 Aug 2023 12:27:52 -0700 Subject: [PATCH 17/23] fix poetry file --- poetry.lock | 289 +++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 252 insertions(+), 37 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8829ecc0..fea679c9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "22.1.0" description = "File support for asyncio." 
+category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -15,6 +16,7 @@ files = [ name = "aiosqlite" version = "0.19.0" description = "asyncio bridge to the standard sqlite3 module" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -30,6 +32,7 @@ docs = ["sphinx (==6.1.3)", "sphinx-mdinclude (==0.5.3)"] name = "alpineer" version = "0.1.9" description = "Toolbox for Multiplexed Imaging. Contains scripts and little tools which are used throughout ark-analysis, mibi-bin-tools, and toffy." +category = "main" optional = false python-versions = ">=3.9,<4.0" files = [ @@ -41,9 +44,9 @@ files = [ charset-normalizer = ">=2.1.1,<3.0.0" matplotlib = ">=3,<4" natsort = ">=8,<9" -numpy = "==1.*" +numpy = ">=1.0.0,<2.0.0" pillow = ">=9,<10" -scikit-image = "==0.*" +scikit-image = "<1.0.0" tifffile = "*" xarray = "*" xmltodict = ">=0.13.0,<0.14.0" @@ -52,6 +55,7 @@ xmltodict = ">=0.13.0,<0.14.0" name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -73,6 +77,7 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" +category = "main" optional = false python-versions = "*" files = [ @@ -84,6 +89,7 @@ files = [ name = "argon2-cffi" version = "21.3.0" description = "The secure Argon2 password hashing algorithm." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -103,6 +109,7 @@ tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -140,6 +147,7 @@ tests = ["pytest"] name = "arrow" version = "1.2.3" description = "Better dates & times for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -154,6 +162,7 @@ python-dateutil = ">=2.7.0" name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" +category = "main" optional = false python-versions = "*" files = [ @@ -171,6 +180,7 @@ test = ["astroid", "pytest"] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -189,6 +199,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -200,6 +211,7 @@ files = [ name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" +category = "main" optional = false python-versions = "*" files = [ @@ -211,6 +223,7 @@ files = [ name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" +category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -229,6 +242,7 @@ lxml = ["lxml"] name = "black" version = "22.12.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -264,6 +278,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -282,6 +297,7 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -293,6 +309,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = "*" files = [ @@ -369,6 +386,7 @@ pycparser = "*" name = "charset-normalizer" version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -383,6 +401,7 @@ unicode-backport = ["unicodedata2"] name = "click" version = "8.1.3" description = "Composable command line interface toolkit" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -397,6 +416,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -408,6 +428,7 @@ files = [ name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -427,6 +448,7 @@ typing = ["mypy (>=0.990)"] name = "contourpy" version = "1.1.0" description = "Python library for calculating contours of 2D quadrilateral grids" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -485,6 +507,7 @@ test-no-images = ["pytest", "pytest-cov", "wurlitzer"] name = "coverage" version = "6.5.0" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -550,6 +573,7 @@ toml = ["tomli"] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" +category = "dev" optional = false python-versions = ">= 3.5" files = [ @@ -558,7 +582,7 @@ files = [ ] [package.dependencies] -coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" +coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" @@ -569,6 +593,7 @@ yaml = ["PyYAML (>=3.10)"] name = "cycler" version = "0.11.0" description = "Composable style cycles" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -580,9 +605,49 @@ files = [ name = "cython" version = "0.29.36" description = "The Cython compiler for writing C extensions for the Python language." 
+category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ + {file = "Cython-0.29.36-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ea33c1c57f331f5653baa1313e445fbe80d1da56dd9a42c8611037887897b9d"}, + {file = "Cython-0.29.36-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2fe34615c13ace29e77bf9d21c26188d23eff7ad8b3e248da70404e5f5436b95"}, + {file = "Cython-0.29.36-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ae75eac4f13cbbcb50b2097470dcea570182446a3ebd0f7e95dd425c2017a2d7"}, + {file = "Cython-0.29.36-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:847d07fc02978c4433d01b4f5ee489b75fd42fd32ccf9cc4b5fd887e8cffe822"}, + {file = "Cython-0.29.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7cb44aeaf6c5c25bd6a7562ece4eadf50d606fc9b5f624fa95bd0281e8bf0a97"}, + {file = "Cython-0.29.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:28fb10aabd56a2e4d399273b48e106abe5a0d271728fd5eed3d36e7171000045"}, + {file = "Cython-0.29.36-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:86b7a13c6b23ab6471d40a320f573fbc8a4e39833947eebed96661145dc34771"}, + {file = "Cython-0.29.36-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:19ccf7fc527cf556e2e6a3dfeffcadfbcabd24a59a988289117795dfed8a25ad"}, + {file = "Cython-0.29.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:74bddfc7dc8958526b2018d3adc1aa6dc9cf2a24095c972e5ad06758c360b261"}, + {file = "Cython-0.29.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6c4d7e36fe0211e394adffd296382b435ac22762d14f2fe45c506c230f91cf2d"}, + {file = "Cython-0.29.36-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = 
"sha256:0bca6a7504e8cfc63a4d3c7c9b9a04e5d05501942a6c8cee177363b61a32c2d4"}, + {file = "Cython-0.29.36-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17c74f80b06e2fa8ffc8acd41925f4f9922da8a219cd25c6901beab2f7c56cc5"}, + {file = "Cython-0.29.36-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:25ff471a459aad82146973b0b8c177175ab896051080713d3035ad4418739f66"}, + {file = "Cython-0.29.36-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9738f23d00d99481797b155ad58f8fc1c72096926ea2554b8ccc46e1d356c27"}, + {file = "Cython-0.29.36-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af2f333f08c4c279f3480532341bf70ec8010bcbc7d8a6daa5ca0bf4513af295"}, + {file = "Cython-0.29.36-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:cd77cedbcc13cb67aef39b8615fd50a67fc42b0c6defea6fc0a21e19d3a062ec"}, + {file = "Cython-0.29.36-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50d506d73a46c4a522ef9fdafcbf7a827ba13907b18ff58f61a8fa0887d0bd8d"}, + {file = "Cython-0.29.36-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:6a571d7c7b52ee12d73bc65b4855779c069545da3bac26bec06a1389ad17ade5"}, + {file = "Cython-0.29.36-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a216b2801c7d9c3babe0a10cc25da3bc92494d7047d1f732d3c47b0cceaf0941"}, + {file = "Cython-0.29.36-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:68abee3be27f21c9642a07a93f8333d491f4c52bc70068e42f51685df9ac1a57"}, + {file = "Cython-0.29.36-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1ef90023da8a9bf84cf16f06186db0906d2ce52a09f751e2cb9d3da9d54eae46"}, + {file = "Cython-0.29.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9deef0761e8c798043dbb728a1c6df97b26e5edc65b8d6c7608b3c07af3eb722"}, + {file = 
"Cython-0.29.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:69af2365de2343b4e5a61c567e7611ddf2575ae6f6e5c01968f7d4f2747324eb"}, + {file = "Cython-0.29.36-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:fdf377b0f6e9325b73ad88933136023184afdc795caeeaaf3dca13494cffd15e"}, + {file = "Cython-0.29.36-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ff2cc5518558c598028ae8d9a43401e0e734b74b6e598156b005328c9da3472"}, + {file = "Cython-0.29.36-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7ca921068242cd8b52544870c807fe285c1f248b12df7b6dfae25cc9957b965e"}, + {file = "Cython-0.29.36-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6058a6d04e04d790cda530e1ff675e9352359eb4b777920df3cac2b62a9a030f"}, + {file = "Cython-0.29.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:de2045ceae1857e56a72f08e0acfa48c994277a353b7bdab1f097db9f8803f19"}, + {file = "Cython-0.29.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9f2a4b4587aaef08815410dc20653613ca04a120a2954a92c39e37c6b5fdf6be"}, + {file = "Cython-0.29.36-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2edd9f8edca69178d74cbbbc180bc3e848433c9b7dc80374a11a0bb0076c926d"}, + {file = "Cython-0.29.36-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c6c0aea8491a70f98b7496b5057c9523740e02cec21cd678eef609d2aa6c1257"}, + {file = "Cython-0.29.36-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:768f65b16d23c630d8829ce1f95520ef1531a9c0489fa872d87c8c3813f65aee"}, + {file = "Cython-0.29.36-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:568625e8274ee7288ad87b0f615ec36ab446ca9b35e77481ed010027d99c7020"}, + {file = "Cython-0.29.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:bdc0a4cb99f55e6878d4b67a4bfee23823484915cb6b7e9c9dd01002dd3592ea"}, + {file = "Cython-0.29.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f0df6552be39853b10dfb5a10dbd08f5c49023d6b390d7ce92d4792a8b6e73ee"}, + {file = "Cython-0.29.36-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:8894db6f5b6479a3c164e0454e13083ebffeaa9a0822668bb2319bdf1b783df1"}, + {file = "Cython-0.29.36-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:53f93a8c342e9445a8f0cb7039775294f2dbbe5241936573daeaf0afe30397e4"}, + {file = "Cython-0.29.36-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ee317f9bcab901a3db39c34ee5a27716f7132e5c0de150125342694d18b30f51"}, + {file = "Cython-0.29.36-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e4b8269e5a5d127a2191b02b9df3636c0dac73f14f1ff8a831f39cb5197c4f38"}, {file = "Cython-0.29.36-py2.py3-none-any.whl", hash = "sha256:95bb13d8be507425d03ebe051f90d4b2a9fdccc64e4f30b35645fdb7542742eb"}, {file = "Cython-0.29.36.tar.gz", hash = "sha256:41c0cfd2d754e383c9eeb95effc9aa4ab847d0c9747077ddd7c0dcb68c3bc01f"}, ] @@ -591,6 +656,7 @@ files = [ name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -618,6 +684,7 @@ files = [ name = "decopatch" version = "1.4.10" description = "Create decorators easily in python." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -632,6 +699,7 @@ makefun = ">=1.5.0" name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -643,6 +711,7 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -654,6 +723,7 @@ files = [ name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" +category = "dev" optional = false python-versions = "*" files = [ @@ -664,6 +734,7 @@ files = [ name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -678,6 +749,7 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" +category = "main" optional = false python-versions = "*" files = [ @@ -692,6 +764,7 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "fastjsonschema" version = "2.17.1" description = "Fastest Python implementation of JSON schema" +category = "dev" optional = false python-versions = "*" files = [ @@ -706,6 +779,7 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "fonttools" version = "4.40.0" description = "Tools to manipulate font files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -763,6 +837,7 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -774,6 
+849,7 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -785,6 +861,7 @@ files = [ name = "imageio" version = "2.31.1" description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -816,6 +893,7 @@ tifffile = ["tifffile"] name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -835,6 +913,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -853,6 +932,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -864,6 +944,7 @@ files = [ name = "ipykernel" version = "6.24.0" description = "IPython Kernel for Jupyter" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -877,7 +958,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -897,6 +978,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.14.0" description = "IPython: Productive Interactive Computing" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -936,6 +1018,7 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy 
(>=1.21)", "pa name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" +category = "dev" optional = false python-versions = "*" files = [ @@ -947,6 +1030,7 @@ files = [ name = "ipywidgets" version = "8.0.7" description = "Jupyter interactive widgets" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -968,6 +1052,7 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -982,6 +1067,7 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -999,6 +1085,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1018,6 +1105,7 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1035,6 +1123,7 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.3.1" description = "Lightweight pipelining with Python functions" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1046,6 +1135,7 @@ files = [ name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1060,6 +1150,7 @@ dev = ["hypothesis"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1071,6 +1162,7 @@ files = [ name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1098,6 +1190,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1107,7 +1200,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1121,6 +1214,7 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-contrib-core" version = "0.4.2" description = "Common utilities for jupyter-contrib projects." +category = "dev" optional = false python-versions = "*" files = [ @@ -1141,6 +1235,7 @@ testing-utils = ["mock", "nose"] name = "jupyter-contrib-nbextensions" version = "0.7.0" description = "A collection of Jupyter nbextensions." +category = "dev" optional = false python-versions = "*" files = [ @@ -1166,6 +1261,7 @@ test = ["mock", "nbformat", "nose", "pip", "requests"] name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1186,6 +1282,7 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.6.3" description = "Jupyter Event System library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1210,6 +1307,7 @@ test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>= name = "jupyter-highlight-selected-word" version = "0.2.0" description = "Jupyter notebook extension that enables highlighting every instance of the current word in the notebook." +category = "dev" optional = false python-versions = "*" files = [ @@ -1221,6 +1319,7 @@ files = [ name = "jupyter-nbextensions-configurator" version = "0.6.3" description = "jupyter serverextension providing configuration interfaces for nbextensions." +category = "dev" optional = false python-versions = "*" files = [ @@ -1242,6 +1341,7 @@ test = ["jupyter-contrib-core[testing-utils]", "mock", "nose", "requests", "sele name = "jupyter-server" version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1254,7 +1354,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1278,6 +1378,7 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-fileid" version = "0.9.0" description = "" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1297,6 +1398,7 @@ test = ["jupyter-server[test] (>=1.15,<3)", "pytest", "pytest-cov"] name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1316,6 +1418,7 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyter-server-ydoc" version = "0.8.0" description = "A Jupyter Server Extension Providing Y Documents." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1335,6 +1438,7 @@ test = ["coverage", "jupyter-server[test] (>=2.0.0a0)", "pytest (>=7.0)", "pytes name = "jupyter-ydoc" version = "0.2.4" description = "Document structures for collaborative editing using Ypy" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1354,6 +1458,7 @@ test = ["pre-commit", "pytest", "pytest-asyncio", "websockets (>=10.0)", "ypy-we name = "jupyterlab" version = "3.6.5" description = "JupyterLab computational environment" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1382,6 +1487,7 @@ test = ["check-manifest", "coverage", "jupyterlab-server[test]", "pre-commit", " name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1393,6 +1499,7 @@ files = [ name = "jupyterlab-server" version = "2.23.0" description = "A set of server components for JupyterLab and JupyterLab like applications." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1419,6 +1526,7 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida name = "jupyterlab-widgets" version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1430,6 +1538,7 @@ files = [ name = "kiwisolver" version = "1.4.4" description = "A fast implementation of the Cassowary constraint solver" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1507,6 +1616,7 @@ files = [ name = "lazy-loader" version = "0.3" description = "lazy_loader" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1522,6 +1632,7 @@ test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] name = "loguru" version = "0.7.0" description = "Python logging made (stupidly) simple" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1540,16 +1651,20 @@ dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegu name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = 
"lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -1558,6 +1673,7 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -1577,6 +1693,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = 
"lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -1586,6 +1703,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -1595,6 +1713,7 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = 
"lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -1604,6 +1723,7 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -1614,13 +1734,16 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", 
hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = 
"lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -1636,6 +1759,7 @@ source = ["Cython (>=0.29.35)"] name = "makefun" version = "1.15.1" description = "Small library to dynamically create python functions." +category = "dev" optional = false python-versions = "*" files = [ @@ -1647,6 +1771,7 @@ files = [ name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1706,6 +1831,7 @@ files = [ name = "matplotlib" version = "3.7.1" description = "Python plotting package" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1768,6 +1894,7 @@ python-dateutil = ">=2.7" name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1780,42 +1907,43 @@ traitlets = "*" [[package]] name = "mibi-bin-tools" -version = "0.2.9" +version = "0.2.10" description = "Source for extracting .bin files from the commercial MIBI." 
+category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "mibi-bin-tools-0.2.9.tar.gz", hash = "sha256:5805c13bad198a2afa9fff0d9430886d36941c9fa401c4ccdefa0267871792cb"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20a8ef2b08a71dc851fabb0b10eb2add13dc8d0b568774ecdf8cceccbf360732"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c445b74bdfa0b8330ae5bdc26baea2b730215f279bb9cd6fc2755c3853544d4a"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c87d3443a5ed549c5c2ca1496e6eba2ed1ead9a8e7fe3e13e43638be16d820a"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07b33d20accfe4a22125b2671abe02394b26119ff5104e2e86bddef7b362b7a1"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cb675e187378ea18a203bc30c2fd7ac483ad1d3a8141e9a304fc019d264959ad"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:778d2b47c676b77ca0126999b22ea5a35e477db2dc7425da2fba2b72f5227408"}, - {file = "mibi_bin_tools-0.2.9-cp310-cp310-win_arm64.whl", hash = "sha256:5409e55672ae541b52b5b6a711801db3aec591d850bbb40288ddc7f54ea4deed"}, - {file = "mibi_bin_tools-0.2.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b5d8667724cd8bffac305549cc0db14a6a56a82fa818afc4152c2168daa8b385"}, - {file = "mibi_bin_tools-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aad32651d305d5f6c62e436c9c74fe7bdfef93ff067005de53169fa3ddbeabbb"}, - {file = "mibi_bin_tools-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:125797c0ccc2df9ee63c1f3d0fc1a515ef4e93b1e5dc5f7290a014d973ac61e0"}, - {file = "mibi_bin_tools-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f97ed4d2ce378cd41080f612564571092b7a2b8a636533b188cecda8f6db443"}, 
- {file = "mibi_bin_tools-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3dda02ecfbe0a9857605f2cb3378cd921c563774fd129fa1eac109308b1014ac"}, - {file = "mibi_bin_tools-0.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:7fb09d5a9b9cc8833136c0aeac89ba341bd7caff463f3976928e1d649f496dc6"}, - {file = "mibi_bin_tools-0.2.9-cp311-cp311-win_arm64.whl", hash = "sha256:122c21ea616879215ff3eb0eb50c3dbd2d1de55312dfee4e4b7076d0849df7ec"}, - {file = "mibi_bin_tools-0.2.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7cc4e2d0e5dcf1ad8925b73894813c3c7bfea6fec1f383346dbf4c8bfbb1669"}, - {file = "mibi_bin_tools-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9d65e469d8b1bd7a4c7d91843212127e22a99c9f483773d7edd4984ae6541b7b"}, - {file = "mibi_bin_tools-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:16312441a2d05eb8095761aebe3900b7918ae01ac5dd3c68519ae2e8a942f1e3"}, - {file = "mibi_bin_tools-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e589e3c3987bae783138c5ebc18c90cabfe3ca64e469c7f98dc75bf643e87340"}, - {file = "mibi_bin_tools-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed29a947c51f397c35466b5bac7a79a1f0802cbd93008b76f2467cb38f42ad0"}, - {file = "mibi_bin_tools-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:a123f129388ec00ebe14860207ab2bd744b2bda569aa4d0b773b7ccee7419f9e"}, -] - -[package.dependencies] -alpineer = ">=0.1.5" + {file = "mibi-bin-tools-0.2.10.tar.gz", hash = "sha256:cfcf2048dc4beaa5765bb1a505f7f220586a6cb3b8cadb0083f7088df81d4cb9"}, + {file = "mibi_bin_tools-0.2.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:33a329f9c2829a75b423b585271dfe4b1579546b28c829b3912fc03fa263d6bc"}, + {file = "mibi_bin_tools-0.2.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c13ed68961e542001d9601e2a14a61922ca609c0ddc582d8da470961ad7b8c18"}, + {file = 
"mibi_bin_tools-0.2.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37808e7573434cdd37fe3b1dc046a584a86331c62043e8843ae3d182b01cb3ce"}, + {file = "mibi_bin_tools-0.2.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17e7c74503940c8fbd7b9719ee760249ea0b94f32f760440578ea1607a6b50ac"}, + {file = "mibi_bin_tools-0.2.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:320fa40d7bbd5a48a6c4d41f5ae3199721832e78f713d76a31674c9186c51680"}, + {file = "mibi_bin_tools-0.2.10-cp310-cp310-win_amd64.whl", hash = "sha256:5d8269daaeada5d0fe78f29d04f43f394b41bc692ce2b028d39acff11cf05a09"}, + {file = "mibi_bin_tools-0.2.10-cp310-cp310-win_arm64.whl", hash = "sha256:1a902fe83453f89604f0f85259de100d0c33106feaeefbc6e0a288ed277492d2"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d6e0e6a659e63a1d6ec74b2e5a4b897a8e4e8358a6e5398532ba84d22949bb3"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:502ec354602e7bcab6540589cfeb3c519f759e3070bdb2dac5aafaa040da35a4"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b54361251da52d16f682f69b0c65425cae433dc3c630a6ca8555e6a78c15e817"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:610bb1d96cb69cff47b01c16f43091bef6496f950f452eea25b8fed669029043"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9de67b7c10f62016b53d5781d34d315a97b1e3f5b401bdcca2bae2902194480"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-win_amd64.whl", hash = "sha256:d164ed5876000907d4527b879db3598bcae6d513092f40880340783ef1d00fca"}, + {file = "mibi_bin_tools-0.2.10-cp311-cp311-win_arm64.whl", hash = "sha256:a45261b0e97554f1ec740a14f6b34869e2c933878989b3ad9e5b871f67eac7cc"}, + {file = 
"mibi_bin_tools-0.2.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a8589e3633993f1f77f99923f85169942649a209df6c0ab017a20c70de4e97c1"}, + {file = "mibi_bin_tools-0.2.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bda7ae14658dcfb630ce5e84e55d2f79b37a84ee98245a093894f7aefc7eec2e"}, + {file = "mibi_bin_tools-0.2.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e4f424579d6bb4780daa493e3cf2fbbfd8e8f245123fbfdffdba9ab0bf60f22"}, + {file = "mibi_bin_tools-0.2.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29d1b0e0db90676a2988f4e4349b734bea105b050311d007196aab2ffb12f00c"}, + {file = "mibi_bin_tools-0.2.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:06b7ba4b049c5aa5b98911995ea59c7ba71ae9dd2d08c49f4b83e82b461fb48e"}, + {file = "mibi_bin_tools-0.2.10-cp39-cp39-win_amd64.whl", hash = "sha256:f45c7f29bd3b6e6bfd62585ede1808e3c3b24b19c0dab60489a6ca959534c56f"}, +] + +[package.dependencies] +alpineer = ">=0.1.9" Cython = ">=0.29.24" matplotlib = ">=3" numpy = ">=1.2" -pandas = ">=1.3" +pandas = ">=2" scikit-image = ">=0.19" -xarray = ">=2022" +xarray = ">=2023" [package.extras] test = ["coveralls[toml]", "pytest", "pytest-cases", "pytest-cov", "pytest-mock", "pytest-pycodestyle"] @@ -1824,6 +1952,7 @@ test = ["coveralls[toml]", "pytest", "pytest-cases", "pytest-cov", "pytest-mock" name = "mistune" version = "3.0.1" description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1835,6 +1964,7 @@ files = [ name = "mock" version = "5.0.2" description = "Rolling backport of unittest.mock for all Pythons" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1851,6 +1981,7 @@ test = ["pytest", "pytest-cov"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1862,6 +1993,7 @@ files = [ name = "natsort" version = "8.4.0" description = "Simple yet flexible natural sorting in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1877,6 +2009,7 @@ icu = ["PyICU (>=1.0.0)"] name = "nbclassic" version = "1.0.0" description = "Jupyter Notebook as a Jupyter Server extension." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1912,6 +2045,7 @@ test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-jupyter", "pytest-p name = "nbclient" version = "0.8.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1921,7 +2055,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -1934,6 +2068,7 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.6.0" description = "Converting Jupyter Notebooks" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1972,6 +2107,7 @@ webpdf = ["pyppeteer (>=1,<1.1)"] name = "nbformat" version = "5.9.0" description = "The Jupyter Notebook format" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1993,6 +2129,7 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.6" description = "Patch asyncio to allow nested event loops" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2004,6 +2141,7 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2022,6 +2160,7 @@ test = ["codecov (>=2.1)", 
"pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "notebook" version = "6.5.4" description = "A web-based notebook environment for interactive computing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2056,6 +2195,7 @@ test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixs name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2073,6 +2213,7 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "numpy" version = "1.25.0" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -2107,6 +2248,7 @@ files = [ name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2118,6 +2260,7 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2129,6 +2272,7 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2196,6 +2340,7 @@ xml = ["lxml (>=4.6.3)"] name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2207,6 +2352,7 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2222,6 +2368,7 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style 
pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2233,6 +2380,7 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." +category = "main" optional = false python-versions = "*" files = [ @@ -2247,6 +2395,7 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" +category = "main" optional = false python-versions = "*" files = [ @@ -2258,6 +2407,7 @@ files = [ name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2337,6 +2487,7 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "platformdirs" version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2352,6 +2503,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2367,6 +2519,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.17.0" description = "Python client for the Prometheus monitoring system." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2381,6 +2534,7 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2395,6 +2549,7 @@ wcwidth = "*" name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2421,6 +2576,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -2432,6 +2588,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "main" optional = false python-versions = "*" files = [ @@ -2446,6 +2603,7 @@ tests = ["pytest"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2457,6 +2615,7 @@ files = [ name = "pycodestyle" version = "2.10.0" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2468,6 +2627,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2479,6 +2639,7 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2493,6 +2654,7 @@ plugins = ["importlib-metadata"] name = "pyparsing" version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -2507,6 +2669,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2543,6 +2706,7 @@ files = [ name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2565,6 +2729,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cases" version = "3.6.14" description = "Separate test code from test cases in pytest." +category = "dev" optional = false python-versions = "*" files = [ @@ -2580,6 +2745,7 @@ makefun = ">=1.9.5" name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2598,6 +2764,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2615,6 +2782,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-pycodestyle" version = "2.3.1" description = "pytest plugin to run pycodestyle" +category = "dev" optional = false python-versions = "~=3.7" files = [ @@ -2633,6 +2801,7 @@ tests = ["pytest-isort"] name = "pytest-randomly" version = "3.12.0" description = "Pytest plugin to randomly order tests and control random.seed." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2648,6 +2817,7 @@ pytest = "*" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2662,6 +2832,7 @@ six = ">=1.5" name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2673,6 +2844,7 @@ files = [ name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -2684,6 +2856,7 @@ files = [ name = "pywavelets" version = "1.4.1" description = "PyWavelets, wavelet transform module" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2721,6 +2894,7 @@ numpy = ">=1.17.3" name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -2744,6 +2918,7 @@ files = [ name = "pywinpty" version = "2.0.10" description = "Pseudo terminal support for Windows from Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2759,6 +2934,7 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2808,6 +2984,7 @@ files = [ name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2897,6 +3074,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2918,6 +3096,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2932,6 +3111,7 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2943,6 +3123,7 @@ files = [ name = "scikit-image" version = "0.21.0" description = "Image processing in Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2966,6 +3147,7 @@ files = [ {file = "scikit_image-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ef5d8d1099317b7b315b530348cbfa68ab8ce32459de3c074d204166951025c"}, {file = "scikit_image-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b1e96c59cab640ca5c5b22c501524cfaf34cbe0cb51ba73bd9a9ede3fb6e1d"}, {file = "scikit_image-0.21.0-cp39-cp39-win_amd64.whl", hash = "sha256:9cffcddd2a5594c0a06de2ae3e1e25d662745a26f94fda31520593669677c010"}, + {file = "scikit_image-0.21.0.tar.gz", hash = "sha256:b33e823c54e6f11873ea390ee49ef832b82b9f70752c8759efd09d5a4e3d87f0"}, ] [package.dependencies] @@ -2992,6 +3174,7 @@ test = ["asv", "matplotlib (>=3.5)", "pooch (>=1.6.0)", "pytest (>=7.0)", "pytes name = "scikit-learn" version = "1.3.0" description = "A set of python modules for machine learning and data mining" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3034,6 +3217,7 @@ tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc ( name = "scipy" version = "1.11.1" description = "Fundamental algorithms for scientific computing in Python" +category = "main" optional = false python-versions = 
"<3.13,>=3.9" files = [ @@ -3070,6 +3254,7 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "seaborn" version = "0.12.2" description = "Statistical data visualization" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3091,6 +3276,7 @@ stats = ["scipy (>=1.3)", "statsmodels (>=0.10)"] name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3107,6 +3293,7 @@ win32 = ["pywin32"] name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3123,6 +3310,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3134,6 +3322,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3145,6 +3334,7 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3156,6 +3346,7 @@ files = [ name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "main" optional = false python-versions = "*" files = [ @@ -3175,6 +3366,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3195,6 +3387,7 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "threadpoolctl" version = "3.1.0" description = "threadpoolctl" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3206,6 +3399,7 @@ files = [ name = "tifffile" version = "2023.7.4" description = "Read and write TIFF files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3223,6 +3417,7 @@ all = ["defusedxml", "fsspec", "imagecodecs (>=2023.1.23)", "lxml", "matplotlib" name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3241,6 +3436,7 @@ test = ["flake8", "isort", "pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3252,6 +3448,7 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "main" optional = false python-versions = ">= 3.8" files = [ @@ -3272,6 +3469,7 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3292,6 +3490,7 @@ telegram = ["requests"] name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3307,6 +3506,7 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3318,6 +3518,7 @@ files = [ name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -3329,6 +3530,7 @@ files = [ name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3343,6 +3545,7 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "urllib3" version = "2.0.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3360,6 +3563,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3399,6 +3603,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -3410,6 +3615,7 @@ files = [ name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3425,6 +3631,7 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" +category = "dev" optional = false python-versions = "*" files = [ @@ -3436,6 +3643,7 @@ files = [ name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3452,6 +3660,7 @@ test = ["websockets"] name = "widgetsnbextension" version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3463,6 +3672,7 @@ files = [ name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3477,6 +3687,7 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] name = "xarray" version = "2023.6.0" description = "N-D labeled arrays and datasets in Python" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3501,6 +3712,7 @@ viz = ["matplotlib", "nc-time-axis", "seaborn"] name = "xmltodict" version = "0.13.0" description = "Makes 
working with XML feel like you are working with JSON" +category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3512,6 +3724,7 @@ files = [ name = "y-py" version = "0.5.9" description = "Python bindings for the Y-CRDT built from yrs (Rust)" +category = "dev" optional = false python-versions = "*" files = [ @@ -3587,6 +3800,7 @@ files = [ name = "ypy-websocket" version = "0.8.2" description = "WebSocket connector for Ypy" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3606,6 +3820,7 @@ test = ["mypy", "pre-commit", "pytest", "pytest-asyncio", "websockets (>=10.0)"] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3620,4 +3835,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "2fe07f2c6eb19f355a5dd660202faf5465ebcc36ad7beb2212588b1ec62e1ea1" +content-hash = "df51137177a85a2f22a8d85d3efc048f58ae4df0c74636088aa093e8d1288d25" From 8dc4c5d8e99563ae75219de5ddad7f423055c275 Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 28 Aug 2023 14:22:17 -0700 Subject: [PATCH 18/23] timeout for file generation --- src/toffy/fov_watcher.py | 55 ++++++++++++++++++++++++++++++++++----- tests/fov_watcher_test.py | 14 +++++++--- 2 files changed, 58 insertions(+), 11 deletions(-) diff --git a/src/toffy/fov_watcher.py b/src/toffy/fov_watcher.py index 7e33dd16..13febba6 100644 --- a/src/toffy/fov_watcher.py +++ b/src/toffy/fov_watcher.py @@ -1,6 +1,6 @@ import logging import os -import platform +import threading import time import warnings from datetime import datetime @@ -29,16 +29,16 @@ class RunStructure: fov_progress (dict): Whether or not an expected file has been created """ - def __init__(self, run_folder: str, timeout: int = 10 * 60): + def __init__(self, run_folder: str, fov_timeout: int = 
10 * 60): """initializes RunStructure by parsing run json within provided run folder Args: run_folder (str): path to run folder - timeout (int): + fov_timeout (int): number of seconds to wait for non-null filesize before raising an error """ - self.timeout = timeout + self.timeout = fov_timeout self.fov_progress = {} self.processed_fovs = [] self.moly_points = [] @@ -183,7 +183,7 @@ def __init__( fov_callback: Callable[[str, str], None], run_callback: Callable[[str], None], intermediate_callback: Callable[[str], None] = None, - timeout: int = 1.03 * 60 * 60, + fov_timeout: int = 1.03 * 60 * 60, ): """Initializes FOV_EventHandler @@ -198,12 +198,17 @@ def __init__( callback to run over the entire run intermediate_callback (Callable[[None], None]): run callback overriden to run on each fov - timeout (int): + fov_timeout (int): number of seconds to wait for non-null filesize before raising an error """ super().__init__() self.run_folder = run_folder + self.last_event_time = datetime.now() + self.timer_thread = threading.Thread(target=self.file_timer, args=(fov_timeout,)) + self.timer_thread.daemon = True + self.timer_thread.start() + self.log_path = os.path.join(log_folder, f"{Path(run_folder).parts[-1]}_log.txt") if not os.path.exists(log_folder): os.makedirs(log_folder) @@ -215,7 +220,7 @@ def __init__( ) # create run structure - self.run_structure = RunStructure(run_folder, timeout=timeout) + self.run_structure = RunStructure(run_folder, fov_timeout=fov_timeout) self.fov_func = fov_callback self.run_func = run_callback @@ -471,6 +476,10 @@ def on_created(self, event: FileCreatedEvent, check_last_fov: bool = True): event (FileCreatedEvent): file creation event """ + # reset event creation time + current_time = datetime.now() + self.last_event_time = current_time + # this happens if _check_last_fov gets called by a prior FOV, no need to reprocess if self.last_fov_num_processed == self.run_structure.highest_fov: return @@ -479,6 +488,38 @@ def on_created(self, event: 
FileCreatedEvent, check_last_fov: bool = True): super().on_created(event) self._run_callbacks(event, check_last_fov) + def file_timer(self, fov_timeout): + """Checks time since last file was generated + Args: + + fov_timeout (int): + how long to wait for fov data to be generated once file detected + """ + while True: + current_time = datetime.now() + time_elapsed = (current_time - self.last_event_time).total_seconds() + + # 3 fov cycles and no new files --> timeout + if time_elapsed > 3 * fov_timeout: + print("Timed out waiting for new file to be generated.") + logging.info( + f'{datetime.now().strftime("%d/%m/%Y %H:%M:%S")} -- Timed out' + "waiting for new file generation.\n" + ) + logging.info( + f'{datetime.now().strftime("%d/%m/%Y %H:%M:%S")} -- ' + f"Running {self.run_func.__name__} on FOVs\n" + ) + + # mark remaining fovs as completed to exit watcher + for fov_name in list(self.run_structure.fov_progress.keys()): + self.run_structure.fov_progress[fov_name] = {"json": True, "bin": True} + + # trigger run callbacks + self.run_func(self.run_folder) + break + time.sleep(fov_timeout) + def on_moved(self, event: FileMovedEvent, check_last_fov: bool = True): """Handles file renaming events diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index 9d040258..320213c7 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -430,7 +430,12 @@ def test_watcher_missing_fovs(): # add extra fov to run file large_run_json_spoof = COMBINED_RUN_JSON_SPOOF.copy() large_run_json_spoof["fovs"] = COMBINED_RUN_JSON_SPOOF["fovs"] + [ - {"runOrder": 5, "scanCount": 1, "frameSizePixels": {"width": 32, "height": 32}} + { + "runOrder": 5, + "scanCount": 1, + "frameSizePixels": {"width": 32, "height": 32}, + "name": "missing_fov", + } ] run_data = os.path.join(tmpdir, "test_run") @@ -450,10 +455,9 @@ def test_watcher_missing_fovs(): encoding="utf-8", ) - # watcher should raise warning for missing fov data + # watcher should raise warning for missing fov 
data (and not hang waiting for new file) with pytest.warns( - UserWarning, - match="The following FOVs were not processed due to missing/empty/late files:", + warnings.warn("The following FOVs were not processed due to missing/empty/late files:"), ): start_watcher( run_data, @@ -461,4 +465,6 @@ def test_watcher_missing_fovs(): fov_callback, run_callback, intermediate_callback, + completion_check_time=5, + zero_size_timeout=5, ) From e0d664391df065c059201b20fd6d6b111545e60e Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 28 Aug 2023 15:29:38 -0700 Subject: [PATCH 19/23] overall timeout arg --- src/toffy/fov_watcher.py | 26 ++++++++++++++++++++------ tests/fov_watcher_test.py | 5 +++-- 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/src/toffy/fov_watcher.py b/src/toffy/fov_watcher.py index 13febba6..819ad1a6 100644 --- a/src/toffy/fov_watcher.py +++ b/src/toffy/fov_watcher.py @@ -29,7 +29,7 @@ class RunStructure: fov_progress (dict): Whether or not an expected file has been created """ - def __init__(self, run_folder: str, fov_timeout: int = 10 * 60): + def __init__(self, run_folder: str, fov_timeout: int = 7800): """initializes RunStructure by parsing run json within provided run folder Args: @@ -183,7 +183,8 @@ def __init__( fov_callback: Callable[[str, str], None], run_callback: Callable[[str], None], intermediate_callback: Callable[[str], None] = None, - fov_timeout: int = 1.03 * 60 * 60, + fov_timeout: int = 7800, + watcher_timeout: int = 3 * 7800, ): """Initializes FOV_EventHandler @@ -200,12 +201,16 @@ def __init__( run callback overriden to run on each fov fov_timeout (int): number of seconds to wait for non-null filesize before raising an error + watcher_timeout (int): + length to wait for new file generation before timing out """ super().__init__() self.run_folder = run_folder self.last_event_time = datetime.now() - self.timer_thread = threading.Thread(target=self.file_timer, args=(fov_timeout,)) + self.timer_thread = threading.Thread( + 
target=self.file_timer, args=(fov_timeout, watcher_timeout) + ) self.timer_thread.daemon = True self.timer_thread.start() @@ -488,19 +493,21 @@ def on_created(self, event: FileCreatedEvent, check_last_fov: bool = True): super().on_created(event) self._run_callbacks(event, check_last_fov) - def file_timer(self, fov_timeout): + def file_timer(self, fov_timeout, watcher_timeout): """Checks time since last file was generated Args: fov_timeout (int): how long to wait for fov data to be generated once file detected + watcher_timeout (int): + length to wait for new file generation before timing out """ while True: current_time = datetime.now() time_elapsed = (current_time - self.last_event_time).total_seconds() # 3 fov cycles and no new files --> timeout - if time_elapsed > 3 * fov_timeout: + if time_elapsed > watcher_timeout: print("Timed out waiting for new file to be generated.") logging.info( f'{datetime.now().strftime("%d/%m/%Y %H:%M:%S")} -- Timed out' @@ -567,6 +574,7 @@ def start_watcher( run_folder_timeout: int = 5400, completion_check_time: int = 30, zero_size_timeout: int = 7800, + watcher_timeout: int = 3 * 7800, ): """Passes bin files to provided callback functions as they're created @@ -618,7 +626,13 @@ def start_watcher( observer = Observer() event_handler = FOV_EventHandler( - run_folder, log_folder, fov_callback, run_callback, intermediate_callback, zero_size_timeout + run_folder, + log_folder, + fov_callback, + run_callback, + intermediate_callback, + zero_size_timeout, + watcher_timeout, ) observer.schedule(event_handler, run_folder, recursive=True) observer.start() diff --git a/tests/fov_watcher_test.py b/tests/fov_watcher_test.py index 320213c7..0dbea604 100644 --- a/tests/fov_watcher_test.py +++ b/tests/fov_watcher_test.py @@ -465,6 +465,7 @@ def test_watcher_missing_fovs(): fov_callback, run_callback, intermediate_callback, - completion_check_time=5, - zero_size_timeout=5, + completion_check_time=1, + zero_size_timeout=1, + watcher_timeout=1, ) From 
604d18cab8c07cd5f628e5d3eb29e52de6846782 Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 28 Aug 2023 15:43:30 -0700 Subject: [PATCH 20/23] notebook update --- ..._resolutions.ipynb => 3f_FOV_checks.ipynb} | 46 +++++++++++++++++-- 1 file changed, 42 insertions(+), 4 deletions(-) rename templates/{3f_check_resolutions.ipynb => 3f_FOV_checks.ipynb} (63%) diff --git a/templates/3f_check_resolutions.ipynb b/templates/3f_FOV_checks.ipynb similarity index 63% rename from templates/3f_check_resolutions.ipynb rename to templates/3f_FOV_checks.ipynb index 651cb304..57ee6293 100644 --- a/templates/3f_check_resolutions.ipynb +++ b/templates/3f_FOV_checks.ipynb @@ -5,8 +5,17 @@ "id": "4857c47b-f476-4494-9a0d-742ef35c0a85", "metadata": {}, "source": [ - "# Check FOV Resolutions\n", - "This notebook will use the run file to check the image resolution for each FOV in the run. Consistent resolution level is important for downstream processing, so it is advised to change the image size of any problematic FOVs." + "# FOV checks\n", + "This notebook will perform two checks on your completed run: ensure all FOVs are the same resolution and verify there are no missing FOV data files." + ] + }, + { + "cell_type": "markdown", + "id": "f8a1d486-4576-464a-bfe3-7e1f584f9a78", + "metadata": {}, + "source": [ + "# FOV checks\n", + "This notebook will perform two checks on your completed run which ensure all FOVs are the same resolution and verify there are no missing FOV data files." 
] }, { @@ -17,7 +26,7 @@ "outputs": [], "source": [ "import os\n", - "from toffy.json_utils import check_fov_resolutions\n", + "from toffy.json_utils import check_fov_resolutions, missing_fov_check\n", "from toffy.image_stitching import fix_image_resolutions" ] }, @@ -45,6 +54,15 @@ "extraction_dir = os.path.join('D:\\\\Extracted_Images', run_name) " ] }, + { + "cell_type": "markdown", + "id": "adba5c6c-7b6b-4a7f-8723-b589df680317", + "metadata": {}, + "source": [ + "## FOV resolutions\n", + "This section will use the run file to check the image resolution for each FOV in the run. Consistent resolution level is important for downstream processing, so it is advised to change the image size of any problematic FOVs." + ] + }, { "cell_type": "markdown", "id": "b4edefe7-7f3a-4883-bcb3-47158753c837", @@ -78,7 +96,7 @@ "tags": [] }, "source": [ - "**Step 2: Change image sizes**" + "**Step 2: Change image sizes (if necessary)**" ] }, { @@ -91,6 +109,26 @@ "# change image size for any FOVs with inconsistent resolutions\n", "fix_image_resolutions(resolution_data, extraction_dir)" ] + }, + { + "cell_type": "markdown", + "id": "23bac7b6-e67e-41ac-aee2-903cc6ab7e27", + "metadata": {}, + "source": [ + "## Missing Fovs\n", + "Sometimes FOVs will fail to be generated due to instrument issues, so the function below checks that each FOV specified in the run json has the corresponding (non-empty) output files." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b3f3b134-2f6d-43b7-bba9-4eb4303555ad", + "metadata": {}, + "outputs": [], + "source": [ + "# check for .bin and .json files\n", + "missing_fov_check(bin_file_dir, run_name)" + ] } ], "metadata": { From ef9ddf75f4b92dad2581a22d3e945baf9cfd524d Mon Sep 17 00:00:00 2001 From: csowers Date: Mon, 28 Aug 2023 15:49:38 -0700 Subject: [PATCH 21/23] remove duplicate cell --- templates/3f_FOV_checks.ipynb | 9 --------- 1 file changed, 9 deletions(-) diff --git a/templates/3f_FOV_checks.ipynb b/templates/3f_FOV_checks.ipynb index 57ee6293..574775a1 100644 --- a/templates/3f_FOV_checks.ipynb +++ b/templates/3f_FOV_checks.ipynb @@ -1,14 +1,5 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "4857c47b-f476-4494-9a0d-742ef35c0a85", - "metadata": {}, - "source": [ - "# FOV checks\n", - "This notebook will perform two checks on your completed run: ensure all FOVs are the same resolution and verify there are no missing FOV data files." 
- ] - }, { "cell_type": "markdown", "id": "f8a1d486-4576-464a-bfe3-7e1f584f9a78", From fec73112e50244a1c5a19a186d87f62e2f7e5ed4 Mon Sep 17 00:00:00 2001 From: csowers Date: Tue, 29 Aug 2023 11:14:10 -0700 Subject: [PATCH 22/23] print fov run stopped on --- src/toffy/fov_watcher.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/toffy/fov_watcher.py b/src/toffy/fov_watcher.py index 819ad1a6..7a9a43c8 100644 --- a/src/toffy/fov_watcher.py +++ b/src/toffy/fov_watcher.py @@ -508,10 +508,12 @@ def file_timer(self, fov_timeout, watcher_timeout): # 3 fov cycles and no new files --> timeout if time_elapsed > watcher_timeout: - print("Timed out waiting for new file to be generated.") + fov_num = self.last_fov_num_processed + fov_name = list(self.run_structure.fov_progress.keys())[fov_num] + print(f"Timed out waiting for {fov_name} files to be generated.") logging.info( f'{datetime.now().strftime("%d/%m/%Y %H:%M:%S")} -- Timed out' - "waiting for new file generation.\n" + f"waiting for {fov_name} files to be generated.\n" ) logging.info( f'{datetime.now().strftime("%d/%m/%Y %H:%M:%S")} -- ' From b5de54dd7816ed95314974366e4cc30a73ea7cfd Mon Sep 17 00:00:00 2001 From: csowers Date: Tue, 29 Aug 2023 13:20:09 -0700 Subject: [PATCH 23/23] notebook updates --- templates/3a_monitor_MIBI_run.ipynb | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/templates/3a_monitor_MIBI_run.ipynb b/templates/3a_monitor_MIBI_run.ipynb index 7cc9598a..ba24a348 100644 --- a/templates/3a_monitor_MIBI_run.ipynb +++ b/templates/3a_monitor_MIBI_run.ipynb @@ -91,18 +91,19 @@ "* The `generate_pulse_heights` FOV callback computes the median pulse heights for each mass specified in the `panel`. \n", "
(See [4b_normalize_image_data](./4b_normalize_image_data.ipynb) for more details.)\n", "\n", - "Callbacks listed in the `run_callbacks` argument will be run only once all expected FOV's have been discovered and processed. \n", + "Callbacks listed in the `intermediate_callbacks` argument run and update as each new FOV is generated.\n", "\n", - "* The `plot_qc_metrics` run callback will run all currently available qc metrics on each FOV, and plot the results once the run has completed.
(See [3c_generate_qc_metrics](./3c_generate_qc_metrics.ipynb) for more details.)\n", + "* The `plot_qc_metrics` intermediate callback will run all currently available qc metrics on each FOV and plot the results.
(See [3c_generate_qc_metrics](./3c_generate_qc_metrics.ipynb) for more details.)\n", "\n", - "* The `plot_mph_metrics` run callback will compute the median pulse height data for each FOV, and plot the results once the run has completed. Additional arguments are: `regression` which when set to True will also plot the linear regression line for the data.
(See [3d_compute_median_pulse_height](./3d_compute_median_pulse_height.ipynb) for more details.)\n", + "* The `plot_mph_metrics` intermediate callback will compute the median pulse height data for each FOV and plot the results. Additional arguments are: `regression` which when set to True will also plot the linear regression line for the data.
(See [3d_compute_median_pulse_height](./3d_compute_median_pulse_height.ipynb) for more details.)\n", "\n", - "* The `image_stitching` run callback will create a single image, which stitched together all FOV images for a specific channel. Additional arguments are: `channels`.
(See [3e_stitch_images](./3e_stitch_images.ipynb) for more details.)\n", "\n", + "Callbacks listed in the `run_callbacks` argument will be run only once all expected FOVs have been discovered and processed. \n", "\n", - "* The `check_incomplete_fovs` run callback will check the run for any partially generated images.
(See [3b_extract_images_from_bin](./3b_extract_images_from_bin.ipynb) for more details.)" "* The `image_stitching` run callback will create a single image, which stitches together all FOV images for a specific channel. Additional arguments are: `channels`.
(See [3e_stitch_images](./3e_stitch_images.ipynb) for more details.)\n", "\n", - "* The `check_missing_fovs` run callback checks that the run produces the appropriate .bin and .json all files for all FOVs included in the run file." + "* The `check_incomplete_fovs` run callback will check the run for any partially generated images.
(See [3b_extract_images_from_bin](./3b_extract_images_from_bin.ipynb) for more details.)\n", " \n", "* The `check_missing_fovs` run callback checks that the run produces the appropriate .bin and .json files for all FOVs included in the run file.
(See [3f_FOV_checks](./3f_FOV_checks.ipynb) for more details.)" ] }, {