Commit 66db9a2
Bump ruff from 0.7.3 to 0.8.3 (#237)
153957 authored Dec 16, 2024
2 parents c099382 + 82b74b0
Showing 9 changed files with 73 additions and 69 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml

@@ -46,7 +46,7 @@ dependencies = [
 dev = [
     'Sphinx',
     'coverage==7.6.9',
-    'ruff==0.7.3',
+    'ruff==0.8.3',
 ]
 astropy = [
     'astropy>=5.0.0',
78 changes: 39 additions & 39 deletions sapphire/__init__.py

@@ -103,59 +103,59 @@
 from .transformations.clock import datetime_to_gps, gps_to_datetime
 
 __all__ = [
-    'analysis',
-    'api',
-    'clusters',
-    'corsika',
-    'data',
-    'esd',
-    'kascade',
-    'publicdb',
-    'qsub',
-    'simulations',
-    'storage',
-    'time_util',
-    'transformations',
-    'utils',
-    'determine_detector_timing_offsets',
-    'DetermineStationTimingOffsets',
     'CoincidenceQuery',
     'Coincidences',
     'CoincidencesESD',
+    'ConeFrontSimulation',
+    'CorsikaQuery',
+    'DataReduction',
+    'DetermineStationTimingOffsets',
     'FindMostProbableValueInSpectrum',
+    'FlatFrontSimulation',
+    'GroundParticlesSimulation',
+    'HiSPARCNetwork',
+    'HiSPARCStations',
+    'KascadeLdfSimulation',
+    'MeanFilter',
+    'MultipleGroundParticlesSimulation',
+    'Network',
+    'NkgLdfSimulation',
     'ProcessEvents',
     'ProcessEventsFromSource',
     'ProcessEventsFromSourceWithTriggerOffset',
-    'ProcessWeather',
-    'ProcessWeatherFromSource',
     'ProcessSingles',
     'ProcessSinglesFromSource',
-    'TraceObservables',
-    'MeanFilter',
-    'DataReduction',
+    'ProcessTimeDeltas',
+    'ProcessWeather',
+    'ProcessWeatherFromSource',
+    'ReconstructESDCoincidences',
     'ReconstructESDEvents',
     'ReconstructESDEventsFromSource',
-    'ReconstructESDCoincidences',
-    'ProcessTimeDeltas',
-    'Network',
-    'Station',
-    'HiSPARCStations',
-    'HiSPARCNetwork',
     'ScienceParkCluster',
-    'CorsikaQuery',
-    'quick_download',
-    'load_data',
+    'Station',
+    'TraceObservables',
+    'analysis',
+    'api',
+    'clusters',
+    'corsika',
+    'data',
+    'datetime_to_gps',
+    'determine_detector_timing_offsets',
+    'download_coincidences',
     'download_data',
     'download_lightning',
-    'download_coincidences',
-    'GroundParticlesSimulation',
-    'MultipleGroundParticlesSimulation',
-    'KascadeLdfSimulation',
-    'NkgLdfSimulation',
-    'FlatFrontSimulation',
-    'ConeFrontSimulation',
+    'esd',
+    'gps_to_datetime',
+    'kascade',
+    'load_data',
+    'publicdb',
+    'qsub',
+    'quick_download',
     'run_tests',
+    'simulations',
+    'storage',
+    'time_util',
+    'transformations',
+    'utils',
     'zenithazimuth_to_equatorial',
-    'gps_to_datetime',
-    'datetime_to_gps',
 ]
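
Note: the `__all__` reshuffle matches ruff's unsorted-`__all__` fix (presumably rule RUF022): the list is re-ordered by a case-sensitive sort, so uppercase class names land before lowercase module and function names. A minimal sketch with illustrative names, not the full sapphire list:

    # Case-sensitive sort: uppercase (ASCII 65-90) precedes lowercase (97-122),
    # which is exactly the ordering in the new __all__ above.
    exports = ['analysis', 'Station', 'load_data', 'CoincidenceQuery']
    assert sorted(exports) == ['CoincidenceQuery', 'Station', 'analysis', 'load_data']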
12 changes: 6 additions & 6 deletions sapphire/analysis/calibration.py

@@ -10,7 +10,7 @@
 from datetime import datetime, timedelta
 from itertools import chain, combinations, tee
 
-from numpy import abs, arange, histogram, isnan, linspace, nan, percentile, sqrt, std, sum
+from numpy import absolute, arange, histogram, isnan, linspace, nan, percentile, sqrt, std
 from scipy.optimize import curve_fit
 
 from ..api import Station
@@ -60,7 +60,7 @@ def determine_detector_timing_offsets(events, station=None):
         offsets[detector_id], _ = determine_detector_timing_offset(dt, dz)
 
     # If all except reference are nan, make reference nan.
-    if sum(isnan(offsets)) == 3:
+    if sum(isnan(offset) for offset in offsets) == 3:
         offsets = [nan, nan, nan, nan]
 
     # Try to make detector 2 the reference point, if it is not nan.
@@ -80,8 +80,8 @@ def determine_detector_timing_offset(dt, dz=0):
     the error of the mean.
     """
-    dt_filter = abs(dt + dz / c) < 100
-    if not sum(dt_filter):
+    dt_filter = absolute(dt + dz / c) < 100
+    if not dt_filter.sum():
         return nan, nan
     p = round_in_base(percentile(dt.compress(dt_filter), [0.5, 99.5]), 2.5)
     bins = arange(p[0] + 1.25, p[1], 2.5)
@@ -369,7 +369,7 @@ def fit_timing_offset(dt, bins):
         popt, pcov = curve_fit(gauss, x, y, p0=(len(dt), 0.0, std(dt)), sigma=sigma, absolute_sigma=False)
         offset = popt[1]
         width = popt[2]
-        offset_error = width / sqrt(sum(y))
+        offset_error = width / sqrt(y.sum())
     except (RuntimeError, TypeError):
         offset, offset_error = nan, nan
     return offset, offset_error
@@ -389,7 +389,7 @@ def determine_best_reference(filters):
 
     for detector_id in ids:
         idx = [j for j in ids if j != detector_id]
-        lengths.append(sum(filters[detector_id] & (filters[idx[0]] | filters[idx[1]] | filters[idx[2]])))
+        lengths.append((filters[detector_id] & (filters[idx[0]] | filters[idx[1]] | filters[idx[2]])).sum())
     return lengths.index(max(lengths))
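
Note: the calibration changes all follow from dropping `abs` and `sum` from the numpy import, names that shadowed the Python builtins for the whole module (the pattern ruff's builtin-import-shadowing check flags). `absolute` is numpy's canonical name for its `abs`, array totals move to the `.sum()` method, and the `sum(... for ...)` generator form deliberately relies on the builtin. A minimal sketch, not sapphire code:

    import numpy as np

    dt_filter = np.array([True, False, True])

    # The method form needs no sum import at all:
    assert dt_filter.sum() == 2

    # numpy.absolute is the ufunc numpy also exposes as np.abs:
    assert np.absolute(np.array([-1.5, 2.0])).tolist() == [1.5, 2.0]

    # The builtin sum consumes generators; calling np.sum on a generator
    # is deprecated and only worked by falling back to the builtin.
    offsets = [np.nan, np.nan, np.nan, 0.0]
    assert sum(np.isnan(offset) for offset in offsets) == 3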
17 changes: 9 additions & 8 deletions sapphire/api.py

@@ -449,13 +449,14 @@ def uptime(self, stations, start=None, end=None):
 
         :returns: number of hours with simultaneous data.
 
         """
-        data = {}
 
         if not hasattr(stations, '__len__'):
             stations = [stations]
 
-        for station in stations:
-            data[station] = Station(station, force_fresh=self.force_fresh, force_stale=self.force_stale).event_time()
+        data = {
+            station: Station(station, force_fresh=self.force_fresh, force_stale=self.force_stale).event_time()
+            for station in stations
+        }
 
         first = min(values['timestamp'][0] for values in data.values())
         last = max(values['timestamp'][-1] for values in data.values())
@@ -466,12 +467,12 @@ def uptime(self, stations, start=None, end=None):
         minimum_events_per_hour = 500
         maximum_events_per_hour = 5_000
 
-        for station in data:
+        for event_time_data in data.values():
             is_active = zeros(len_array)
-            start_i = (data[station]['timestamp'][0] - first) // 3600
-            end_i = start_i + len(data[station])
-            is_active[start_i:end_i] = (data[station]['counts'] > minimum_events_per_hour) & (
-                data[station]['counts'] < maximum_events_per_hour
+            start_i = (event_time_data['timestamp'][0] - first) // 3600
+            end_i = start_i + len(event_time_data)
+            is_active[start_i:end_i] = (event_time_data['counts'] > minimum_events_per_hour) & (
+                event_time_data['counts'] < maximum_events_per_hour
             )
             all_active = logical_and(all_active, is_active)
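
Note: the `uptime()` change swaps an incrementally built dict for a dict comprehension and then iterates `data.values()` instead of re-indexing `data[station]`; behaviour should be identical. An equivalence sketch, with a hypothetical `fetch()` standing in for `Station(...).event_time()`:

    def fetch(station):
        # hypothetical stand-in for Station(...).event_time()
        return {'timestamp': (0,), 'counts': (0,)}

    stations = [501, 502]

    # Before: mutate an empty dict in a loop.
    data = {}
    for station in stations:
        data[station] = fetch(station)

    # After: one expression builds the same mapping.
    assert data == {station: fetch(station) for station in stations}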
2 changes: 1 addition & 1 deletion sapphire/simulations/__init__.py

@@ -25,4 +25,4 @@
 
 from . import base, detector, gammas, groundparticles, ldf, showerfront
 
-__all__ = ['base', 'detector', 'groundparticles', 'ldf', 'showerfront', 'gammas']
+__all__ = ['base', 'detector', 'gammas', 'groundparticles', 'ldf', 'showerfront']
4 changes: 2 additions & 2 deletions sapphire/simulations/groundparticles.py

@@ -167,8 +167,8 @@ def simulate_trigger(self, detector_observables):
         detectors_low = sum(True for observables in detector_observables if observables['n'] > 0.3)
         detectors_high = sum(True for observables in detector_observables if observables['n'] > 0.5)
 
-        return (
-            n_detectors == 4 and (detectors_high >= 2 or detectors_low >= 3) or n_detectors == 2 and detectors_low >= 2
+        return (n_detectors == 4 and (detectors_high >= 2 or detectors_low >= 3)) or (
+            n_detectors == 2 and detectors_low >= 2
         )
 
     def simulate_gps(self, station_observables, shower_parameters, station):
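
Note: both forms of the `simulate_trigger()` return value are logically identical, since `and` binds tighter than `or` in Python; the new parentheses only spell out the two trigger paths (4-detector and 2-detector stations), in line with ruff's parenthesize-chained-operators rule (presumably RUF021). A brute-force equivalence check:

    for n_detectors in (2, 4):
        for detectors_low in range(5):
            for detectors_high in range(detectors_low + 1):
                implicit = (
                    n_detectors == 4 and (detectors_high >= 2 or detectors_low >= 3)
                    or n_detectors == 2 and detectors_low >= 2
                )
                explicit = (n_detectors == 4 and (detectors_high >= 2 or detectors_low >= 3)) or (
                    n_detectors == 2 and detectors_low >= 2
                )
                assert implicit == explicit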
12 changes: 7 additions & 5 deletions sapphire/tests/analysis/test_calibration.py

@@ -7,7 +7,7 @@
 
 import tables
 
-from numpy import all, array, isnan, nan, random, std
+from numpy import array, isnan, nan, random, std
 
 from sapphire import HiSPARCNetwork, HiSPARCStations
 from sapphire.analysis import calibration
@@ -33,8 +33,9 @@ def test_determine_detector_timing_offsets(self):
     @patch.object(calibration, 'fit_timing_offset')
     def test_determine_detector_timing_offset(self, mock_fit):
         # Empty list
-        offset = calibration.determine_detector_timing_offset(array([]))
-        self.assertTrue(all(isnan(offset)))
+        offset, error = calibration.determine_detector_timing_offset(array([]))
+        self.assertTrue(isnan(offset))
+        self.assertTrue(isnan(error))
 
         dt = array([-10, 0, 10])
         dz = 0.6
@@ -74,8 +75,9 @@ def test_determine_station_timing_offset(self, mock_fit, mock_percentile):
         dzc = dz / c
 
         # Empty list
-        offset = calibration.determine_station_timing_offset([])
-        self.assertTrue(all(isnan(offset)))
+        offset, error = calibration.determine_station_timing_offset([])
+        self.assertTrue(isnan(offset))
+        self.assertTrue(isnan(error))
 
         # Good result
         mock_fit.return_value = (1.0, 5.0)
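
Note: `determine_detector_timing_offset()` and `determine_station_timing_offset()` return an `(offset, error)` pair, so the tests now unpack the result and assert on each value; this also removes the last use of numpy's `all`, which shadowed the builtin. A sketch of the difference:

    import numpy as np

    result = (np.nan, np.nan)
    assert np.all(np.isnan(result))  # old style: one opaque check over the pair

    offset, error = result           # new style: a failure names the exact value
    assert np.isnan(offset)
    assert np.isnan(error)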
11 changes: 6 additions & 5 deletions sapphire/tests/validate_results.py

@@ -2,8 +2,8 @@
 
 import tables
 
-from numpy import all, array
-from numpy.testing import assert_array_almost_equal
+from numpy import array
+from numpy.testing import assert_array_almost_equal, assert_equal
 
 
 def validate_results(test, expected_path, actual_path):
@@ -92,7 +92,7 @@ def validate_vlarrays(test, expected_node, actual_node):
         f"VLArrays '{expected_node._v_pathname}' do not have the same shape.",
     )
     for expected_array, actual_array in zip(expected_node, actual_node):
-        test.assertTrue(all(expected_array == actual_array), f"VLArrays '{expected_node._v_pathname}' do not match.")
+        assert_equal(actual_array, expected_array, f"VLArrays '{expected_node._v_pathname}' do not match.")
 
 
 def validate_arrays(test, expected_node, actual_node):
@@ -103,8 +103,9 @@ def validate_arrays(test, expected_node, actual_node):
         actual_node.shape,
         f"Arrays '{expected_node._v_pathname}' do not have the same shape.",
    )
-    test.assertTrue(
-        all(array(expected_node.read()) == array(actual_node.read())),
+    assert_equal(
+        array(actual_node.read()),
+        array(expected_node.read()),
        f"Arrays '{expected_node._v_pathname}' do not match.",
    )
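
Note: replacing `test.assertTrue(all(expected == actual), ...)` with `numpy.testing.assert_equal` does more than drop the shadowing `all` import: `assert_equal` prints the mismatching elements on failure and treats NaNs at matching positions as equal, which element-wise `==` never does. Sketch:

    from numpy import array, nan
    from numpy.testing import assert_equal

    expected = array([1.0, nan, 3.0])
    actual = array([1.0, nan, 3.0])

    assert_equal(actual, expected)          # passes: positional NaNs count as equal
    assert not (expected == actual).all()   # bare comparison fails on NaN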
4 changes: 2 additions & 2 deletions sapphire/utils.py

@@ -10,7 +10,7 @@
 from functools import wraps
 from os import environ
 
-from numpy import arcsin, ceil, floor, pi, round, sin, sqrt
+from numpy import arcsin, around, ceil, floor, pi, sin, sqrt
 from progressbar import ETA, Bar, Percentage, ProgressBar
 from scipy.stats import norm
 
@@ -73,7 +73,7 @@ def floor_in_base(value, base):
 def round_in_base(value, base):
     """Get nearest multiple of base to the value"""
 
-    return base * round(value / base)
+    return base * around(value / base)
 
 
 def closest_in_list(value, items):
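
Note: numpy's `around` performs the same rounding as numpy's `round`, without shadowing the builtin `round` when imported by name, so `round_in_base()` is unchanged in behaviour, including round-half-to-even ties. Sketch:

    import numpy as np

    # round_in_base(7.3, 2.5): nearest multiple of 2.5
    assert np.around(7.3 / 2.5) * 2.5 == 7.5

    # Ties round half to even, like the Python builtin:
    assert np.around(0.5) == 0.0 and np.around(1.5) == 2.0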
