diff --git a/icecube_tools/detector/r2021.py b/icecube_tools/detector/r2021.py index 89df034..a35e259 100644 --- a/icecube_tools/detector/r2021.py +++ b/icecube_tools/detector/r2021.py @@ -12,7 +12,11 @@ from icecube_tools.detector.energy_resolution import EnergyResolution from icecube_tools.detector.angular_resolution import AngularResolution from icecube_tools.utils.data import ( - find_files, data_directory, IceCubeData, ddict, available_irf_periods + find_files, + data_directory, + IceCubeData, + ddict, + available_irf_periods, ) from icecube_tools.utils.vMF import get_kappa, get_theta_p @@ -26,29 +30,26 @@ EMIN = 1e2 EMAX = 1e9 EBREAK = 1e4 -INDEX1 = 0. +INDEX1 = 0.0 INDEX2 = -1.1 K = 5e-5 -class DummyPDF(): +class DummyPDF: def __init__(self): pass - def pdf(self, x): if isinstance(x, np.ndarray): return np.zeros_like(x) else: - return 0. + return 0.0 - def cdf(self, x): if isinstance(x, np.ndarray): return np.zeros_like(x) else: - return 0. - + return 0.0 def rvs(self, size=1, random_state=42): raise NotImplementedError("Dummies cannot be sampled from!") @@ -63,7 +64,7 @@ class R2021IRF(EnergyResolution, AngularResolution): """ STACK = {} - + def __init__(self, filename, period, **kwargs): """ Special class to handle smearing effects given in the 2021 data release. @@ -72,19 +73,19 @@ def __init__(self, filename, period, **kwargs): if period in R2021IRF.STACK: self.__dict__ = self.STACK[period].__dict__ else: - #self.read(fetch) + # self.read(fetch) self._filename = filename self.ret_ang_err_p = kwargs.get("ret_ang_err_p", 0.68) self.prob_contained = 0.68 - self.year = 2012 # subject to change + self.year = 2012 # subject to change self.nu_type = "nu_mu" - self.uniform = uniform(0, 2*np.pi) + self.uniform = uniform(0, 2 * np.pi) self.output = np.loadtxt(self._filename, comments="#") self.dataset = self.output - #convert PSF and AngErr values to log(angle/degree) + # convert PSF and AngErr values to log(angle/degree) self.dataset[:, 6:-1] = np.log10(self.dataset[:, 6:-1]) true_energy_lower = np.array(list(set(self.output[:, 0]))) @@ -101,39 +102,54 @@ def __init__(self, filename, period, **kwargs): self.faulty = [] - for c_d, (d_l, d_h) in enumerate(zip(self.declination_bins[:-1], self.declination_bins[1:])): + for c_d, (d_l, d_h) in enumerate( + zip(self.declination_bins[:-1], self.declination_bins[1:]) + ): for c, tE in enumerate(self.true_energy_bins[:-1]): - reduced = self.dataset[np.nonzero((np.isclose(self.dataset[:, 0], tE)) & - np.isclose(self.dataset[:, 2], np.rad2deg(d_l))) + reduced = self.dataset[ + np.nonzero( + (np.isclose(self.dataset[:, 0], tE)) + & np.isclose(self.dataset[:, 2], np.rad2deg(d_l)) + ) ] - if np.all(np.isclose(reduced[:, -1], np.zeros_like(reduced[:, -1]))): + if np.all( + np.isclose(reduced[:, -1], np.zeros_like(reduced[:, -1])) + ): self.faulty.append((c, c_d)) if self.faulty: logger.warning(f"Empty true energy bins at: {self.faulty}") - - self.ang_res_values = 1 # placeholder, isn't used anyway + + self.ang_res_values = 1 # placeholder, isn't used anyway self.true_energy_values = ( self.true_energy_bins[0:-1] + np.diff(self.true_energy_bins) / 2 ) - logger.debug('Creating Ereco distributions') - #Reco energy is handled without ddict() because it's not that much calculation - #and has no parts with zero-entries + logger.debug("Creating Ereco distributions") + # Reco energy is handled without ddict() because it's not that much calculation + # and has no parts with zero-entries # ^ this aged like milk - self.reco_energy = 
np.empty((self.true_energy_bins.size-1, self.declination_bins.size-1), dtype=rv_histogram) + self.reco_energy = np.empty( + (self.true_energy_bins.size - 1, self.declination_bins.size - 1), + dtype=rv_histogram, + ) # self.reco_energy_splines = np.empty((self.declination_bins.size-1), dtype=RectBivariateSpline) - self.reco_energy_bins = np.empty((self.true_energy_bins.size-1, self.declination_bins.size-1), dtype=np.ndarray) + self.reco_energy_bins = np.empty( + (self.true_energy_bins.size - 1, self.declination_bins.size - 1), + dtype=np.ndarray, + ) for c_e, e in enumerate(self.true_energy_bins[:-1]): for c_d, d in enumerate(self.declination_bins[:-1]): if not (c_e, c_d) in self.faulty: n, bins = self._marginalisation(c_e, c_d) - self.reco_energy[c_e, c_d] = rv_histogram((n, bins), density=False) + self.reco_energy[c_e, c_d] = rv_histogram( + (n, bins), density=False + ) self.reco_energy_bins[c_e, c_d] = bins - + else: # workaround for true energy bins completely empty - idx = c_e-1 + idx = c_e - 1 while True: try: _, bins = self._marginalisation(idx, c_d) @@ -146,13 +162,14 @@ def __init__(self, filename, period, **kwargs): idx += 1 self._values = [] - logger.debug('Creating empty dicts for kinematic angle dists and angerr dists') + logger.debug( + "Creating empty dicts for kinematic angle dists and angerr dists" + ) self._marginal_pdf_psf = ddict() self._marginal_pdf_angerr = ddict() R2021IRF.STACK[period] = self - @staticmethod def get_angle(vec1, vec2): @@ -163,8 +180,12 @@ def get_angle(vec1, vec2): :return: Angle between vectors in degrees """ - return np.rad2deg(np.arccos(np.dot(vec1.T, vec2) / (np.linalg.norm(vec1, axis=0) * np.linalg.norm(vec2, axis=0)))) - + return np.rad2deg( + np.arccos( + np.dot(vec1.T, vec2) + / (np.linalg.norm(vec1, axis=0) * np.linalg.norm(vec2, axis=0)) + ) + ) def sample_energy(self, coord, Etrue, seed=42, show_progress=False): """ @@ -175,10 +196,8 @@ def sample_energy(self, coord, Etrue, seed=42, show_progress=False): angle between incident and deflected direction in degrees, reconstructed energy in GeV """ - #TODO merge this with R2021IRF().sample() and maybe add a keyword - #if only reconstructed energy is supposed to be sampled - - + # TODO merge this with R2021IRF().sample() and maybe add a keyword + # if only reconstructed energy is supposed to be sampled ra, dec = coord if not isinstance(ra, np.ndarray): @@ -199,15 +218,15 @@ def sample_energy(self, coord, Etrue, seed=42, show_progress=False): c_e, _, c_d, _ = self._return_etrue_bins(Etrue, dec) Ereco = np.zeros(size) - logger.debug(f'Energy and declination bins: {c_e}, {c_d}') - logger.debug('Sampling Ereco') + logger.debug(f"Energy and declination bins: {c_e}, {c_d}") + logger.debug("Sampling Ereco") + + # Idea behind this loop structure: + # Group data by the bin indices + # Sample further data for each group in one function call + # Move on to next group + # Omits loop over single events -> faster, in principle - #Idea behind this loop structure: - #Group data by the bin indices - #Sample further data for each group in one function call - #Move on to next group - #Omits loop over single events -> faster, in principle - set_e = set(c_e) set_d = set(c_d) for idx_e in set_e: @@ -216,11 +235,12 @@ def sample_energy(self, coord, Etrue, seed=42, show_progress=False): for idx_d in set_d: _index_d = np.argwhere(idx_d == c_d).squeeze() _index_f = (np.intersect1d(_index_d, _index_e),) - Ereco[_index_f] = self.reco_energy[idx_e, idx_d].rvs(size=_index_f[0].size, random_state=seed) + Ereco[_index_f] = 
self.reco_energy[idx_e, idx_d].rvs( + size=_index_f[0].size, random_state=seed + ) return np.power(10, Ereco) - def sample(self, coord, Etrue, seed=42, show_progress=False): """ Sample new ra, dec values given a true energy and direction. @@ -231,9 +251,9 @@ def sample(self, coord, Etrue, seed=42, show_progress=False): reconstructed energy in GeV """ - #if show_progress: + # if show_progress: # logger.basicConfig(level=logging.INFO) - #else: + # else: # logger.basicConfig(level=logging.CRITICAL) ra, dec = coord @@ -252,7 +272,7 @@ def sample(self, coord, Etrue, seed=42, show_progress=False): size = 1 Etrue = np.array([Etrue]) - #Initialise empty arrays for data + # Initialise empty arrays for data c_e, _, c_d, _ = self._return_etrue_bins(Etrue, dec) c_e_r = np.zeros(size) c_k = np.zeros(size) @@ -264,15 +284,15 @@ def sample(self, coord, Etrue, seed=42, show_progress=False): new_ras = np.zeros(size) new_decs = np.zeros(size) - logger.debug(f'Energy and declination bins: {c_e}, {c_d}') - logger.debug('Sampling Ereco') + logger.debug(f"Energy and declination bins: {c_e}, {c_d}") + logger.debug("Sampling Ereco") + + # Idea behind this loop structure: + # Group data by the bin indices + # Sample further data for each group in one function call + # Move on to next group + # Omits loop over single events -> faster, in principle - #Idea behind this loop structure: - #Group data by the bin indices - #Sample further data for each group in one function call - #Move on to next group - #Omits loop over single events -> faster, in principle - set_e = set(c_e) set_d = set(c_d) for idx_e in set_e: @@ -283,11 +303,15 @@ def sample(self, coord, Etrue, seed=42, show_progress=False): _index_f = (np.intersect1d(_index_d, _index_e),) if _index_f[0].size == 0: continue - Ereco[_index_f] = self.reco_energy[idx_e, idx_d].rvs(size=_index_f[0].size, random_state=seed) - current_c_e_r = self._return_reco_energy_bins(idx_e, idx_d, Ereco[_index_f]) + Ereco[_index_f] = self.reco_energy[idx_e, idx_d].rvs( + size=_index_f[0].size, random_state=seed + ) + current_c_e_r = self._return_reco_energy_bins( + idx_e, idx_d, Ereco[_index_f] + ) c_e_r[_index_f] = current_c_e_r - logger.debug(f'Ereco: {Ereco[_index_f]}, bins: {current_c_e_r}') + logger.debug(f"Ereco: {Ereco[_index_f]}, bins: {current_c_e_r}") set_e_r = set(current_c_e_r) @@ -295,7 +319,9 @@ def sample(self, coord, Etrue, seed=42, show_progress=False): _index_help = np.argwhere(c_e_r == idx_e_r).squeeze() _index_r = (np.intersect1d(_index_f[0], _index_help),) - kinematic_angle[_index_r] = self.marginal_pdf_psf(idx_e, idx_d, idx_e_r, 'pdf').rvs(size=_index_r[0].size, random_state=seed) + kinematic_angle[_index_r] = self.marginal_pdf_psf( + idx_e, idx_d, idx_e_r, "pdf" + ).rvs(size=_index_r[0].size, random_state=seed) """ try: kinematic_angle[_index_r] = self.marginal_pdf_psf(idx_e, idx_d, idx_e_r, 'pdf').rvs(size=_index_r[0].size, random_state=seed) @@ -307,14 +333,18 @@ def sample(self, coord, Etrue, seed=42, show_progress=False): self.marginal_pdf_psf.add(rv_histogram((n, bins), density=False), idx_e, idx_d, idx_e_r, 'pdf') kinematic_angle[_index_r] = self.marginal_pdf_psf(idx_e, idx_d, idx_e_r, 'pdf').rvs(size=_index_r[0].size, random_state=seed) """ - current_c_k = self._return_kinematic_bins(idx_e, idx_d, idx_e_r, kinematic_angle[_index_r]) + current_c_k = self._return_kinematic_bins( + idx_e, idx_d, idx_e_r, kinematic_angle[_index_r] + ) c_k[_index_r] = current_c_k set_k = set(current_c_k) for idx_k in set_k: _index_help = np.argwhere(c_k == idx_k).squeeze() 
                        _index_k = (np.intersect1d(_index_r[0], _index_help),)
-                        ang_err[_index_k] = self.marginal_pdf_angerr(idx_e, idx_d, idx_e_r, idx_k, 'pdf').rvs(size=_index_k[0].size, random_state=seed)
+                        ang_err[_index_k] = self.marginal_pdf_angerr(
+                            idx_e, idx_d, idx_e_r, idx_k, "pdf"
+                        ).rvs(size=_index_k[0].size, random_state=seed)
                        """
                        try:
                            ang_err[_index_k] = self.marginal_pdf_angerr(idx_e, idx_d, idx_e_r, idx_k, 'pdf').rvs(size=_index_k[0].size, random_state=seed)
@@ -325,17 +355,18 @@ def sample(self, coord, Etrue, seed=42, show_progress=False):
                            self.marginal_pdf_angerr.add(bins, idx_e, idx_d, idx_e_r, idx_k, 'bins')
                            ang_err[_index_k] = self.marginal_pdf_angerr(idx_e, idx_d, idx_e_r, idx_k, 'pdf').rvs(size=_index_k[0].size, random_state=seed)
                        """
-            #kappa needs an angle in degrees, prob of containment, here 0.5 as stated in the paper
+            # kappa needs an angle in degrees and a probability of containment, here 0.683 (AngErr is treated as the 68.3% containment radius)
            ang_err = np.power(10, ang_err)
-            kappa = get_kappa(ang_err, 0.5)
-            reco_ang_err = get_theta_p(kappa, self.ret_ang_err_p)
+            kappa = get_kappa(ang_err, 0.683)
+            # reco_ang_err = get_theta_p(kappa, self.ret_ang_err_p)
+            reco_ang_err = ang_err
            logger.debug(kappa.shape)
            logger.debug(unit_vector.shape)
            new_unit_vector = sample_vMF(unit_vector, kappa)
            if new_unit_vector.shape != (3, Etrue.size):
                new_unit_vector = new_unit_vector.T
-            #create sky coordinates from rotated/deflected vector
+            # create sky coordinates from rotated/deflected vector
            new_sky_coord = SkyCoord(
                x=new_unit_vector[0],
                y=new_unit_vector[1],
@@ -348,34 +379,43 @@ def sample(self, coord, Etrue, seed=42, show_progress=False):
        logger.debug(f"reco_ang_error shape: {reco_ang_err.shape}")
        return new_ras, new_decs, reco_ang_err, np.power(10, Ereco)
 
-
    def marginal_pdf_psf(self, idx_e, idx_d, idx_e_r, hist_type, c_psf=None):
        try:
            self._marginal_pdf_psf(idx_e, idx_d, idx_e_r, hist_type)
        except KeyError:
-            logger.debug(f'Creating kinematic angle dist for {idx_e}, {idx_d}, {idx_e_r}')
+            logger.debug(
+                f"Creating kinematic angle dist for {idx_e}, {idx_d}, {idx_e_r}"
+            )
            n, bins = self._marginalize_over_angerr(idx_e, idx_d, idx_e_r)
-            self._marginal_pdf_psf.add(bins, idx_e, idx_d, idx_e_r, 'bins')
-            self._marginal_pdf_psf.add(rv_histogram((n, bins), density=False), idx_e, idx_d, idx_e_r, 'pdf')
+            self._marginal_pdf_psf.add(bins, idx_e, idx_d, idx_e_r, "bins")
+            self._marginal_pdf_psf.add(
+                rv_histogram((n, bins), density=False), idx_e, idx_d, idx_e_r, "pdf"
+            )
        finally:
            if c_psf is None:
                return self._marginal_pdf_psf(idx_e, idx_d, idx_e_r, hist_type)
            else:
                return self._marginal_pdf_psf(idx_e, idx_d, idx_e_r, hist_type, c_psf)
 
-
    def marginal_pdf_angerr(self, idx_e, idx_d, idx_e_r, idx_k, hist_type):
        try:
            return self._marginal_pdf_angerr(idx_e, idx_d, idx_e_r, idx_k, hist_type)
        except KeyError as KE:
-            logger.debug(f'Creating AngErr dist for {idx_e}, {idx_d}, {idx_e_r}, {idx_k}')
+            logger.debug(
+                f"Creating AngErr dist for {idx_e}, {idx_d}, {idx_e_r}, {idx_k}"
+            )
            n, bins = self._get_angerr_dist(idx_e, idx_d, idx_e_r, idx_k)
-            self._marginal_pdf_angerr.add(rv_histogram((n, bins), density=False), idx_e, idx_d, idx_e_r, idx_k, 'pdf')
-            self._marginal_pdf_angerr.add(bins, idx_e, idx_d, idx_e_r, idx_k, 'bins')
+            self._marginal_pdf_angerr.add(
+                rv_histogram((n, bins), density=False),
+                idx_e,
+                idx_d,
+                idx_e_r,
+                idx_k,
+                "pdf",
+            )
+            self._marginal_pdf_angerr.add(bins, idx_e, idx_d, idx_e_r, idx_k, "bins")
            return self._marginal_pdf_angerr(idx_e, idx_d, idx_e_r, idx_k, hist_type)
 
-
-
    @classmethod
    def from_period(cls, period, **kwargs):
        """
@@ -397,9 +437,8 @@ def from_period(cls, period, **kwargs):
        files = find_files(dataset_dir, R2021_IRF_FILENAME)
 
        for f in files:
-            if "_".join((period, R2021_IRF_FILENAME)) in f:
-                return cls(f, period, **kwargs)
-
+            if "_".join((period, R2021_IRF_FILENAME)) in f:
+                return cls(f, period, **kwargs)
 
    def _get_angerr_dist(self, c_e, c_d, c_e_r, c_psf):
        """
@@ -411,20 +450,40 @@ def _get_angerr_dist(self, c_e, c_d, c_e_r, c_psf):
        :return: normalised (over logspace) fractional counts, logarithmic bins in log(degrees)
        """
 
-        presel_data = self.dataset[np.intersect1d(np.nonzero(np.isclose(self.dataset[:, 0], self.true_energy_bins[c_e])),
-                                             np.nonzero(np.isclose(self.dataset[:, 2], np.rad2deg(self.declination_bins[c_d]))))]
-
-        reduced_data = presel_data[np.intersect1d(np.nonzero(np.isclose(presel_data[:, 4], self.reco_energy_bins[c_e, c_d][c_e_r])),
-                                             np.nonzero(np.isclose(presel_data[:, 6], self.marginal_pdf_psf(c_e, c_d, c_e_r, 'bins', c_psf))))]
+        presel_data = self.dataset[
+            np.intersect1d(
+                np.nonzero(np.isclose(self.dataset[:, 0], self.true_energy_bins[c_e])),
+                np.nonzero(
+                    np.isclose(
+                        self.dataset[:, 2], np.rad2deg(self.declination_bins[c_d])
+                    )
+                ),
+            )
+        ]
+
+        reduced_data = presel_data[
+            np.intersect1d(
+                np.nonzero(
+                    np.isclose(
+                        presel_data[:, 4], self.reco_energy_bins[c_e, c_d][c_e_r]
+                    )
+                ),
+                np.nonzero(
+                    np.isclose(
+                        presel_data[:, 6],
+                        self.marginal_pdf_psf(c_e, c_d, c_e_r, "bins", c_psf),
+                    )
+                ),
+            )
+        ]
 
-        needed_vals = np.nonzero(reduced_data[:, 9] - reduced_data[:, 8])
+        needed_vals = np.nonzero(reduced_data[:, 9] - reduced_data[:, 8])
        bins = np.union1d(reduced_data[needed_vals, 9], reduced_data[needed_vals, 8])
        frac_counts = reduced_data[needed_vals, -1].squeeze()
        frac_counts /= np.sum(frac_counts)
 
        return frac_counts, bins
 
-
    def _return_etrue_bins(self, energy, declination):
        """
        Returns the lower bin edges and their indices for given energy and declination.
@@ -438,7 +497,9 @@ def _return_etrue_bins(self, energy, declination):
        if not isinstance(energy, np.ndarray):
            energy = np.array([energy])
 
-        if np.all(energy >= self.true_energy_bins[0]) and np.all(energy <= self.true_energy_bins[-1]):
+        if np.all(energy >= self.true_energy_bins[0]) and np.all(
+            energy <= self.true_energy_bins[-1]
+        ):
            c_e = np.digitize(energy, self.true_energy_bins)
            idx = np.nonzero(c_e < self.true_energy_bins.shape[0])
            c_e[idx] = c_e[idx] - 1
@@ -448,8 +509,9 @@ def _return_etrue_bins(self, energy, declination):
        else:
            raise ValueError("Some energy out of bounds.")
 
-
-        if np.all(declination >= self.declination_bins[0]) and np.all(declination <= self.declination_bins[-1]):
+        if np.all(declination >= self.declination_bins[0]) and np.all(
+            declination <= self.declination_bins[-1]
+        ):
            c_d = np.digitize(declination, self.declination_bins)
            idx = np.nonzero(c_d < self.declination_bins.shape[0])
            c_d[idx] -= 1
@@ -458,9 +520,8 @@ def _return_etrue_bins(self, energy, declination):
            d = self.declination_bins[c_d]
        else:
            raise ValueError("Some declination out of bounds.")
-
-        return c_e, e, c_d, d
 
+        return c_e, e, c_d, d
 
    def _return_reco_energy_bins(self, c_e, c_d, Ereco):
        """
@@ -478,7 +539,6 @@ def _return_reco_energy_bins(self, c_e, c_d, Ereco):
 
        return index
 
-
    def _return_kinematic_bins(self, c_e, c_d, c_e_r, angle):
        """
        Returns bin index of kinematic angle given in log(degees).
@@ -488,14 +548,13 @@ def _return_kinematic_bins(self, c_e, c_d, c_e_r, angle): :return: Bin index of kinematic angle """ - bins = self.marginal_pdf_psf(c_e, c_d, c_e_r, 'bins') + bins = self.marginal_pdf_psf(c_e, c_d, c_e_r, "bins") c_k = np.digitize(angle, bins) - 1 idx = np.nonzero(c_k == bins.shape[0] - 1) c_k[idx] = c_k[idx] - 1 return c_k - def _marginalisation(self, c_e, c_d, qoi="ERec"): """ Function that marginalises over the smearing data provided for the 2021 release. @@ -510,26 +569,38 @@ def _marginalisation(self, c_e, c_d, qoi="ERec"): else: raise ValueError("Not other quantity of interest is available.") - #do pre-selection of true energy and declination - reduced_data = self.dataset[np.intersect1d(np.argwhere( - np.isclose(self.dataset[:, 0], self.true_energy_bins[c_e])), - np.argwhere( - np.isclose(self.dataset[:, 2], np.rad2deg(self.declination_bins[c_d]))))] - - bins = np.array(sorted(list(set(reduced_data[:, needed_index]).union( - set(reduced_data[:, needed_index+1]))))) + # do pre-selection of true energy and declination + reduced_data = self.dataset[ + np.intersect1d( + np.argwhere(np.isclose(self.dataset[:, 0], self.true_energy_bins[c_e])), + np.argwhere( + np.isclose( + self.dataset[:, 2], np.rad2deg(self.declination_bins[c_d]) + ) + ), + ) + ] + + bins = np.array( + sorted( + list( + set(reduced_data[:, needed_index]).union( + set(reduced_data[:, needed_index + 1]) + ) + ) + ) + ) - frac_counts = np.zeros(bins.shape[0]-1) + frac_counts = np.zeros(bins.shape[0] - 1) - #marginalise over uninteresting quantities + # marginalise over uninteresting quantities for c_b, b in enumerate(bins[:-1]): indices = np.nonzero(np.isclose(b, reduced_data[:, needed_index])) frac_counts[c_b] = np.sum(reduced_data[indices, -1]) return frac_counts, bins - - def _marginalize_over_angerr(self, c_e, c_d, c_e_r): + def _marginalize_over_angerr(self, c_e, c_d, c_e_r): """ Function that marginalises over the smearing data provided for the 2021 release. 
:param int c_e: Index of energy bin @@ -538,18 +609,30 @@ def _marginalize_over_angerr(self, c_e, c_d, c_e_r): :return: n, bins of the created distribution/histogram """ - presel_data = self.dataset[np.intersect1d(np.nonzero(np.isclose(self.dataset[:, 0], self.true_energy_bins[c_e])), - np.nonzero(np.isclose(self.dataset[:, 2], np.rad2deg(self.declination_bins[c_d]))))] - - reduced_data = presel_data[np.nonzero(np.isclose(presel_data[:, 4], self.reco_energy_bins[c_e, c_d][c_e_r]))] + presel_data = self.dataset[ + np.intersect1d( + np.nonzero(np.isclose(self.dataset[:, 0], self.true_energy_bins[c_e])), + np.nonzero( + np.isclose( + self.dataset[:, 2], np.rad2deg(self.declination_bins[c_d]) + ) + ), + ) + ] + reduced_data = presel_data[ + np.nonzero( + np.isclose(presel_data[:, 4], self.reco_energy_bins[c_e, c_d][c_e_r]) + ) + ] - bins = np.array(sorted(list(set(reduced_data[:, 6]).union( - set(reduced_data[:, 7]))))) + bins = np.array( + sorted(list(set(reduced_data[:, 6]).union(set(reduced_data[:, 7])))) + ) if bins.shape[0] != 0: - frac_counts = np.zeros(bins.shape[0]-1) + frac_counts = np.zeros(bins.shape[0] - 1) - #marginalise over uninteresting quantities + # marginalise over uninteresting quantities for c_b, b in enumerate(bins[:-1]): indices = np.nonzero(np.isclose(b, reduced_data[:, 6])) frac_counts[c_b] = np.sum(reduced_data[indices, -1]) @@ -558,7 +641,6 @@ def _marginalize_over_angerr(self, c_e, c_d, c_e_r): else: return None, None - def _do_rotation(self, vec, deflection, seed=42): """ Function called to sample deflections from appropriate distributions and @@ -571,21 +653,21 @@ def _do_rotation(self, vec, deflection, seed=42): def make_perp(vec): perp = np.zeros(3) - if not np.all(np.isclose(vec[:2], 0.)): - perp[0] = - vec[1] + if not np.all(np.isclose(vec[:2], 0.0)): + perp[0] = -vec[1] perp[1] = vec[0] perp /= np.linalg.norm(perp) else: - perp[1] = 1. 
+ perp[1] = 1.0 return perp - #sample kinematic angle from distribution + # sample kinematic angle from distribution azimuth = self.uniform.rvs(size=1, random_state=seed)[0] deflection = np.deg2rad(np.power(10, deflection)) - logger.debug(f'azimuth: {azimuth}\ndeflection: {deflection}') + logger.debug(f"azimuth: {azimuth}\ndeflection: {deflection}") rot_vec_1 = make_perp(vec) - rot_vec_1 *= deflection - #create rotation object from vector + rot_vec_1 *= deflection + # create rotation object from vector rot_1 = R.from_rotvec(rot_vec_1) rot_vec_2 = vec / np.linalg.norm(vec) @@ -595,4 +677,3 @@ def make_perp(vec): intermediate = rot_1.apply(vec) final = rot_2.apply(intermediate) return final - diff --git a/icecube_tools/point_source_analysis/point_source_analysis.py b/icecube_tools/point_source_analysis/point_source_analysis.py index c738159..98364c7 100644 --- a/icecube_tools/point_source_analysis/point_source_analysis.py +++ b/icecube_tools/point_source_analysis/point_source_analysis.py @@ -582,7 +582,7 @@ def combine_outputs(cls, *paths, events: Events = None): ns_merror.append(input["ns_merror"]) fit_ok.append(input["fit_ok"]) ntrials += int(np.sum(input["fit_ok"])) - if isinstance(cls, MapScanTSDistribution): + if cls == MapScanTSDistribution: try: assert np.isclose(declination, input["dec_test"]) except NameError: @@ -604,7 +604,7 @@ def combine_outputs(cls, *paths, events: Events = None): output["ns_merror"] = np.vstack(ns_merror) output["index_merror"] = np.vstack(index_merror) output["ntrials"] = ntrials - if isinstance(cls, MapScanTSDistribution): + if cls == MapScanTSDistribution: output["ra_test"] = np.array([ra]) output["dec_test"] = np.array([declination]) else: diff --git a/icecube_tools/point_source_likelihood/spatial_likelihood.py b/icecube_tools/point_source_likelihood/spatial_likelihood.py index 5cea47a..a43c650 100644 --- a/icecube_tools/point_source_likelihood/spatial_likelihood.py +++ b/icecube_tools/point_source_likelihood/spatial_likelihood.py @@ -32,15 +32,14 @@ def __call__(self): pass - class EventDependentSpatialGaussianLikelihood(SpatialLikelihood): def __init__(self, sigma=2): """ :param sigma: Upper limit of angular distance to considered events """ - #TODO actually implement this somehow - #convert p to sigma if necessary... - #omit for now + # TODO actually implement this somehow + # convert p to sigma if necessary... + # omit for now self._sigma = sigma # @profile @@ -49,10 +48,10 @@ def __call__( ang_err: np.ndarray, ra: np.ndarray, dec: np.ndarray, - source_coord: Tuple[float, float] + source_coord: Tuple[float, float], ): """ - Use the neutrino energy to determine sigma and + Use the neutrino energy to determine sigma and evaluate the likelihood. P(x_i | x_s) = (1 / (2pi * sigma^2)) * exp( |x_i - x_s|^2/ (2*sigma^2) ) @@ -67,7 +66,7 @@ def __call__( src_ra, src_dec = source_coord - norm = 0.5 / (np.pi * sigma_rad ** 2) + norm = 0.5 / (np.pi * sigma_rad**2) # Calculate the cosine of the distance of the source and the event on # the sphere. 
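
# Aside on the r / np.sin(r) factor introduced in the next hunk (a sketch for
# illustration, not part of the patch): the Gaussian above is normalised on
# the tangent plane, while events live on the sphere, where the solid-angle
# element is sin(r) dr dphi. The Jacobian r / sin(r) keeps the density
# normalised in solid angle. A quick numerical check, assuming an arbitrary
# 1 degree resolution:
import numpy as np
from scipy.integrate import quad

sigma_rad = np.deg2rad(1.0)
norm = 0.5 / (np.pi * sigma_rad**2)

def pdf_solid_angle(r):
    # corrected density times the solid-angle measure 2 * pi * sin(r)
    dist = np.exp(-0.5 * (r / sigma_rad) ** 2)
    return (r / np.sin(r)) * norm * dist * 2 * np.pi * np.sin(r)

total, _ = quad(pdf_solid_angle, 1e-9, np.pi)
print(total)  # ~1.0 when sigma is well below a radian
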
@@ -85,7 +84,7 @@ def __call__(self):
 
        dist = np.exp(-0.5 * (r / sigma_rad) ** 2)
 
-        return norm * dist
+        return r / np.sin(r) * norm * dist
 
 
 class DataDrivenBackgroundSpatialLikelihood(SpatialLikelihood):
@@ -102,10 +101,13 @@ def __init__(self, period: str):
        cosz_bins = aeff.cos_zenith_bins
        self._sin_dec_bins = np.sort(-cosz_bins)
        self._dec_bins = np.arcsin(self._sin_dec_bins)
-        self.hist = np.histogram(np.sin(self._events.dec[self._period]), bins=self._sin_dec_bins, density=True)
+        self.hist = np.histogram(
+            np.sin(self._events.dec[self._period]),
+            bins=self._sin_dec_bins,
+            density=True,
+        )
        self.likelihood = rv_histogram(self.hist, density=True)
 
-
    def __call__(self, dec: np.ndarray):
        """
        Returns likelihood for each provided event, 2pi accounts for uniform RA distribution.
@@ -114,7 +116,6 @@ def __call__(self, dec: np.ndarray):
 
        return self.likelihood.pdf(np.sin(dec)) / (2 * np.pi)
 
-
 class SpatialGaussianLikelihood(SpatialLikelihood):
    """
    Spatial part of the point source likelihood.
@@ -125,20 +126,19 @@ class SpatialGaussianLikelihood(SpatialLikelihood):
    def __init__(self, angular_resolution):
        """
        Spatial part of the point source likelihood.
-        
+
        P(x_i | x_s) where x is the direction (unit_vector).
-        
-        :param angular_resolution; Angular resolution of detector [deg]. 
+
+        :param angular_resolution: Angular resolution of detector [deg].
        """
 
        # @TODO: Init with some sigma as a function of E?
 
        self._sigma = angular_resolution
 
-
    def __call__(self, ra, dec, source_coord):
        """
-        Use the neutrino energy to determine sigma and 
+        Use the neutrino energy to determine sigma and
        evaluate the likelihood.
 
        P(x_i | x_s) = (1 / (2pi * sigma^2)) * exp( |x_i - x_s|^2/ (2*sigma^2) )
@@ -151,7 +151,7 @@ def __call__(self, ra, dec, source_coord):
 
        src_ra, src_dec = source_coord
 
-        norm = 0.5 / (np.pi * sigma_rad ** 2)
+        norm = 0.5 / (np.pi * sigma_rad**2)
 
        # Calculate the cosine of the distance of the source and the event on
        # the sphere.
@@ -172,26 +172,25 @@ def __call__(self, ra, dec, source_coord):
 
        return norm * dist
 
-
 class EnergyDependentSpatialGaussianLikelihood(SpatialLikelihood):
    """
-    Energy dependent spatial likelihood. Uses AngularResolution
-    specified for given spectral indicies. For example in the 2015
+    Energy dependent spatial likelihood. Uses AngularResolution
+    specified for given spectral indices. For example in the 2015
    data release angular resolution plots.
-    
+
    The atmospheric spectrum is approximated as a power law
    with a single spectral index.
    """
 
    def __init__(self, angular_resolution_list, index_list):
        """
-        Energy dependent spatial likelihood. Uses AngularResolution
-        specified for given spectral indicies. For example in the 2015
+        Energy dependent spatial likelihood. Uses AngularResolution
+        specified for given spectral indices. For example in the 2015
        data release angular resolution plots.
-        
+
        The atmospheric spectrum is approximated as a power law
        with a single spectral index.
-        
+
        :param angular_resolution_list: List of AngularResolution instances.
        :param index_list: List of corresponding spectral indices
        """
@@ -202,7 +201,7 @@ def __init__(self, angular_resolution_list, index_list):
 
    def _get_sigma(self, reco_energy, index):
        """
-        Return the expected angular resolution for a 
+        Return the expected angular resolution for a
        given reconstrcuted energy and spectral index.
:param reco_energy: Reconstructed energy [GeV] @@ -220,7 +219,7 @@ def _get_sigma(self, reco_energy, index): def get_low_res(self): """ - Representative lower resolution + Representative lower resolution at fixed low energy and bg index. To be used in PointSourceLikelihood. @@ -247,7 +246,7 @@ def __call__(self, ra, dec, source_coord, reco_energy, index=2.0): sigma_rad = np.deg2rad(self._get_sigma(e, index)) - norm = 0.5 / (np.pi * sigma_rad ** 2) + norm = 0.5 / (np.pi * sigma_rad**2) # Calculate the cosine of the distance of the source and the event on # the sphere. diff --git a/icecube_tools/utils/data.py b/icecube_tools/utils/data.py index 8e8262c..f7e9765 100644 --- a/icecube_tools/utils/data.py +++ b/icecube_tools/utils/data.py @@ -831,9 +831,7 @@ def _sort(self): for p in self._periods: self._reco_energy[p] = np.power(10, self.events[p][:, self.reco_energy_]) - self._ang_err[p] = get_theta_p( - get_kappa(self.events[p][:, self.ang_err_], 0.5) - ) + self._ang_err[p] = self.events[p][:, self.ang_err_] self._ra[p] = np.deg2rad(self.events[p][:, self.ra_]) self._dec[p] = np.deg2rad(self.events[p][:, self.dec_]) self._mjd[p] = self.events[p][:, self.mjd_]
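
# Aside on the containment-probability bookkeeping changed above (a sketch,
# not part of the patch): the old code read AngErr as a 50% containment
# radius, converted it to a vMF concentration kappa, and re-expressed it at
# ret_ang_err_p via get_theta_p. The patch treats AngErr directly as the
# 68.3% containment radius, so the kappa round trip at a single probability
# is the identity and the get_theta_p conversion can be dropped. Assuming
# get_kappa and get_theta_p are mutual inverses at equal containment
# probability (which is how the code uses them):
import numpy as np
from icecube_tools.utils.vMF import get_kappa, get_theta_p

ang_err = np.array([0.5, 1.0, 2.0])  # illustrative angular errors in degrees
kappa = get_kappa(ang_err, 0.683)
print(get_theta_p(kappa, 0.683))  # should reproduce ang_err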