From 517e50b957ddbefde2850c37bf45a50b79a19980 Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Sun, 21 Mar 2021 11:40:19 +0100 Subject: [PATCH 1/9] Make data directory if it doesn't exist --- icecube_tools/utils/data.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/icecube_tools/utils/data.py b/icecube_tools/utils/data.py index 50aad73..8b795d4 100644 --- a/icecube_tools/utils/data.py +++ b/icecube_tools/utils/data.py @@ -43,6 +43,10 @@ def __init__( self.ls(verbose=False, update=update) + # Make data directory if it doesn't exist + if not os.path.exists(self.data_directory): + os.makedirs(self.data_directory) + def ls(self, verbose=True, update=False): """ List the available datasets. From c8a3cac2f4b88033762356bc9718fa4750b4c923 Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 19:20:23 +0200 Subject: [PATCH 2/9] Update requests cache for concurrent access --- icecube_tools/utils/data.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/icecube_tools/utils/data.py b/icecube_tools/utils/data.py index 8b795d4..3c5b535 100644 --- a/icecube_tools/utils/data.py +++ b/icecube_tools/utils/data.py @@ -39,7 +39,11 @@ def __init__( self.data_directory = data_directory - requests_cache.install_cache(cache_name=cache_name) + requests_cache.install_cache( + cache_name=cache_name, + expire_after=-1, + backend="redis", + ) self.ls(verbose=False, update=update) From 7c6285ce7bf8aa08a78dc92943f7f78a2ec63c9f Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 19:29:35 +0200 Subject: [PATCH 3/9] Remove redis backend --- icecube_tools/utils/data.py | 1 - 1 file changed, 1 deletion(-) diff --git a/icecube_tools/utils/data.py b/icecube_tools/utils/data.py index 3c5b535..bb03da2 100644 --- a/icecube_tools/utils/data.py +++ b/icecube_tools/utils/data.py @@ -42,7 +42,6 @@ def __init__( requests_cache.install_cache( cache_name=cache_name, expire_after=-1, - backend="redis", ) self.ls(verbose=False, update=update) From 72797fc22e705bf39e37c9e6645aac610fbcb18e Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 20:23:27 +0200 Subject: [PATCH 4/9] Modify from_dataset method to only load files if prompted --- icecube_tools/detector/angular_resolution.py | 23 ++++++++++++----- icecube_tools/detector/effective_area.py | 27 ++++++++++++++------ icecube_tools/detector/energy_resolution.py | 26 +++++++++++++------ 3 files changed, 53 insertions(+), 23 deletions(-) diff --git a/icecube_tools/detector/angular_resolution.py b/icecube_tools/detector/angular_resolution.py index 452d65f..a272e9b 100644 --- a/icecube_tools/detector/angular_resolution.py +++ b/icecube_tools/detector/angular_resolution.py @@ -4,7 +4,7 @@ from astropy.coordinates import SkyCoord from astropy import units as u -from icecube_tools.utils.data import IceCubeData, find_files +from icecube_tools.utils.data import IceCubeData, find_files, data_directory from icecube_tools.utils.vMF import get_kappa, get_theta_p """ @@ -267,23 +267,32 @@ def ret_ang_err(self): return self._ret_ang_err @classmethod - def from_dataset(cls, dataset_id, **kwargs): + def from_dataset(cls, dataset_id, fetch=False, **kwargs): """ Load angular resolution from publicly available data. - """ - data_interface = IceCubeData() + :dataset_id: ID date of the dataset e.g. 
"20181018" + :param fetch: If true, download dataset if missing + """ if dataset_id not in _supported_dataset_ids: raise NotImplementedError("This dataset is not currently supported") - dataset = data_interface.find(dataset_id) + if fetch: + + data_interface = IceCubeData() + + dataset = data_interface.find(dataset_id) - data_interface.fetch(dataset) + data_interface.fetch(dataset) + + dataset_dir = data_interface.get_path_to(dataset[0]) + + else: - dataset_dir = data_interface.get_path_to(dataset[0]) + dataset_dir = data_directory if dataset_id == "20181018": diff --git a/icecube_tools/detector/effective_area.py b/icecube_tools/detector/effective_area.py index e0bf66a..48cef9c 100644 --- a/icecube_tools/detector/effective_area.py +++ b/icecube_tools/detector/effective_area.py @@ -1,7 +1,11 @@ import numpy as np from abc import ABC, abstractmethod -from icecube_tools.utils.data import IceCubeData, find_files +from icecube_tools.utils.data import ( + IceCubeData, + find_files, + data_directory, +) """ Module for working with the public IceCube @@ -258,7 +262,7 @@ def detection_probability(self, true_energy, true_cos_zenith, max_energy): return scaled_values[energy_index] @classmethod - def from_dataset(cls, dataset_id): + def from_dataset(cls, dataset_id, fetch=False, **kwargs): """ Build effective area from a public dataset. @@ -267,19 +271,26 @@ def from_dataset(cls, dataset_id): effective areas. :param dataset_id: Date of dataset release e.g. 20181018 + :param fetch: If true, download dataset if not existing """ - data_interface = IceCubeData() - if dataset_id not in _supported_dataset_ids: raise NotImplementedError("This dataset is not currently supported") - dataset = data_interface.find(dataset_id) + if fetch: + + data_interface = IceCubeData() + + dataset = data_interface.find(dataset_id) - data_interface.fetch(dataset) + data_interface.fetch(dataset) + + dataset_dir = data_interface.get_path_to(dataset[0]) + + else: - dataset_dir = data_interface.get_path_to(dataset[0]) + dataset_dir = data_directory # Find filename if dataset_id == "20181018": @@ -296,4 +307,4 @@ def from_dataset(cls, dataset_id): # Latest dataset aeff_file_name = files[0] - return cls(aeff_file_name) + return cls(aeff_file_name, **kwargs) diff --git a/icecube_tools/detector/energy_resolution.py b/icecube_tools/detector/energy_resolution.py index 9ba1378..852f4ea 100644 --- a/icecube_tools/detector/energy_resolution.py +++ b/icecube_tools/detector/energy_resolution.py @@ -7,7 +7,7 @@ R2015AeffReader, R2015_AEFF_FILENAME, ) -from icecube_tools.utils.data import IceCubeData, find_files +from icecube_tools.utils.data import IceCubeData, find_files, data_directory """ Module for handling the energy resolution @@ -119,23 +119,33 @@ def __init__(self, filename, conditional=GIVEN_ETRUE, **kwargs): self._fit_polynomial() @classmethod - def from_dataset(cls, dataset_id, **kwargs): + def from_dataset(cls, dataset_id, fetch=False, **kwargs): """ Load energy resolution from publicly available data. - """ - data_interface = IceCubeData() + :param dataset_id: Date identifying the dataset + e.g. 
"20181018" + :param fetch: If true, download dataset if missing + """ if dataset_id not in _supported_dataset_ids: raise NotImplementedError("This dataset is not currently supported") - dataset = data_interface.find(dataset_id) + if fetch: + + data_interface = IceCubeData() + + dataset = data_interface.find(dataset_id) - data_interface.fetch(dataset) + data_interface.fetch(dataset) + + dataset_dir = data_interface.get_path_to(dataset[0]) + + else: - dataset_dir = data_interface.get_path_to(dataset[0]) + dataset_dir = data_directory if dataset_id == "20150820": @@ -143,7 +153,7 @@ def from_dataset(cls, dataset_id, **kwargs): eres_file_name = files[0] - return cls(eres_file_name) + return cls(eres_file_name, **kwargs) def _integrate_out_cos_zenith(self): """ From 7c7557045df479e8aca015f75303c09b6af4935e Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 20:25:24 +0200 Subject: [PATCH 5/9] Set fetch=True in from_dataset() by default --- icecube_tools/detector/angular_resolution.py | 2 +- icecube_tools/detector/effective_area.py | 2 +- icecube_tools/detector/energy_resolution.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/icecube_tools/detector/angular_resolution.py b/icecube_tools/detector/angular_resolution.py index a272e9b..aec0ba7 100644 --- a/icecube_tools/detector/angular_resolution.py +++ b/icecube_tools/detector/angular_resolution.py @@ -267,7 +267,7 @@ def ret_ang_err(self): return self._ret_ang_err @classmethod - def from_dataset(cls, dataset_id, fetch=False, **kwargs): + def from_dataset(cls, dataset_id, fetch=True, **kwargs): """ Load angular resolution from publicly available data. diff --git a/icecube_tools/detector/effective_area.py b/icecube_tools/detector/effective_area.py index 48cef9c..1cbcabb 100644 --- a/icecube_tools/detector/effective_area.py +++ b/icecube_tools/detector/effective_area.py @@ -262,7 +262,7 @@ def detection_probability(self, true_energy, true_cos_zenith, max_energy): return scaled_values[energy_index] @classmethod - def from_dataset(cls, dataset_id, fetch=False, **kwargs): + def from_dataset(cls, dataset_id, fetch=True, **kwargs): """ Build effective area from a public dataset. diff --git a/icecube_tools/detector/energy_resolution.py b/icecube_tools/detector/energy_resolution.py index 852f4ea..51e0639 100644 --- a/icecube_tools/detector/energy_resolution.py +++ b/icecube_tools/detector/energy_resolution.py @@ -119,7 +119,7 @@ def __init__(self, filename, conditional=GIVEN_ETRUE, **kwargs): self._fit_polynomial() @classmethod - def from_dataset(cls, dataset_id, fetch=False, **kwargs): + def from_dataset(cls, dataset_id, fetch=True, **kwargs): """ Load energy resolution from publicly available data. From 4c24bd60d081c062fa57c7f58b6635db7383d12d Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 20:56:49 +0200 Subject: [PATCH 6/9] Fix python-package workflow --- .github/workflows/python-package.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 366892c..b2b2d09 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -25,6 +25,7 @@ jobs: run: | python -m pip install --upgrade pip python -m pip install flake8 pytest + python -m pip install . 
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Lint with flake8 run: | From 381a4f6c8d055ba4d206e9f34cbb99d6c99400d8 Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 21:08:43 +0200 Subject: [PATCH 7/9] Update python-package.yml --- .github/workflows/python-package.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index b2b2d09..8a3de09 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -25,8 +25,7 @@ jobs: run: | python -m pip install --upgrade pip python -m pip install flake8 pytest - python -m pip install . - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + python -m pip install . - name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names @@ -35,4 +34,4 @@ jobs: flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with pytest run: | - pytest tests \ No newline at end of file + pytest tests From 3c38ec1b74c18d5d228f039b34b7c5f0439710eb Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 21:10:48 +0200 Subject: [PATCH 8/9] Add vMF to deps --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index fed16b9..96e53e1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -26,6 +26,7 @@ install_requires = bs4 tqdm versioneer + vMF [versioneer] VCS=git From 0d94c3d4797c4f1905265716a669b977f9993b41 Mon Sep 17 00:00:00 2001 From: Francesca Capel Date: Thu, 1 Apr 2021 21:14:50 +0200 Subject: [PATCH 9/9] Add pandas to deps --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index 96e53e1..a61dbec 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,6 +27,7 @@ install_requires = tqdm versioneer vMF + pandas [versioneer] VCS=git
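
Note on the API change in patches 4 and 5: the sketch below shows how the reworked
from_dataset() classmethods would be called after this series. The class names
(EffectiveArea, AngularResolution, EnergyResolution) and the import paths are
assumptions inferred from the modules touched in patch 4; only the dataset IDs and
the fetch keyword appear in the diffs themselves.

    # Minimal usage sketch, assuming icecube_tools is installed and the class
    # names below match the modules changed in patch 4.
    from icecube_tools.detector.effective_area import EffectiveArea
    from icecube_tools.detector.angular_resolution import AngularResolution
    from icecube_tools.detector.energy_resolution import EnergyResolution

    # After patch 5, fetch defaults to True: the IceCubeData interface is
    # created, the dataset is located, downloaded if missing, and read from
    # the downloaded release directory.
    aeff = EffectiveArea.from_dataset("20181018")

    # With fetch=False the files are expected to already sit in the local
    # data_directory, so IceCubeData (and its requests cache) is never
    # instantiated and no network access takes place.
    ang_res = AngularResolution.from_dataset("20181018", fetch=False)
    eres = EnergyResolution.from_dataset("20150820", fetch=False)

For the caching change in patches 2 and 3: expire_after=-1 tells requests_cache to
keep cached responses indefinitely, and dropping backend="redis" falls back to the
default sqlite backend, which avoids requiring a running redis server for
concurrent access.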