Merge branch 'v1.2_tomerge'
# Conflicts:
#	pipelines/LOFAR_quality.py
henedler committed Jul 25, 2024
2 parents 76d6b9a + 4dc9e8c commit 5c6a049
Showing 122 changed files with 7,364 additions and 506 deletions.
Empty file modified: .gitignore (mode 100644 → 100755)
2 changes: 1 addition & 1 deletion LiLF/lib_cat.py
@@ -17,7 +17,7 @@
 import numpy as np
 import copy
 import logging
-from astropy.table import Table, QTable, Column
+from astropy.table import Table, Column
 from astropy.wcs import WCS
 import astropy.units as u
 from astropy.units import UnitConversionError
88 changes: 55 additions & 33 deletions LiLF/lib_dd.py
@@ -12,8 +12,9 @@

 class Direction(object):

-    def __init__(self, name):
+    def __init__(self, name, soltypes=['ph','ph1','ph2','fr','amp1','amp2']):
         self.name = name
+        self.soltypes = soltypes
         self.position = None # [deg, deg]
         self.size = None # deg (1 value)
         self.localrms = None # Jy/b (1 value)
@@ -26,7 +27,7 @@ def __init__(self, name):
         self.region_file = None

         self.model = {}
-        self.h5parms = {'ph':[],'fr':[],'amp1':[],'amp2':[]}
+        self.h5parms = {soltype:[] for soltype in self.soltypes}

         # for debug
         self.avg_t = 0
@@ -38,7 +39,7 @@ def clean(self):
         # TODO: remove files?

         # associated h5parms
-        self.h5parms = {'ph':[],'fr':[],'amp1':[],'amp2':[]}
+        self.h5parms = {soltype:[] for soltype in self.soltypes}

     def set_region(self, loc):
         """
@@ -74,7 +75,7 @@ def set_model(self, root, typ, apply_region=True):

         if apply_region:
             region_file = self.get_region()
-            for model_file in glob.glob(root+'*[0-9]-model.fits'):
+            for model_file in glob.glob(root+'*[0-9]-model.fits') + glob.glob(root+'*[0-9]-model-pb.fits'):
                 lib_img.blank_image_reg(model_file, region_file, model_file, inverse=True, blankval=0.)

         self.model[typ] = root
@@ -93,15 +94,14 @@ def add_h5parm(self, typ, h5parmFile):
         typ can be 'ph', 'fr', 'amp1', or 'amp2'
         h5parmFile: filename
         """
-        assert (typ == 'ph' or typ == 'fr' or typ == 'amp1' or typ == 'amp2')
+        assert typ in self.soltypes
         self.h5parms[typ].append(h5parmFile)

     def get_h5parm(self, typ, pos=-1):
         """
         typ can be 'ph', 'fr', 'amp1', or 'amp2'
         pos: the cycle, from 0 to the last added; negative numbers search backwards; if none exists, returns None
         """
-        assert (typ == 'ph' or typ == 'fr' or typ == 'amp1' or typ == 'amp2')
+        assert typ in self.soltypes
         l = self.h5parms[typ]
         try:
             return l[pos]
@@ -177,7 +177,7 @@ def __init__(self, coords, fluxes, kernel_size=0.2, look_distance=0.3, grouping_
         look_distance: max distance to look for nearby sources [deg]
         grouping_distance: [deg]
         """
-        self.coords = np.array(coords)
+        self.coords = np.array([SkyCoord(ra=ra*u.deg, dec=dec*u.deg, frame='fk5') for ra,dec in coords])
         self.fluxes = fluxes
         self.kernel_size = kernel_size # deg
         self.look_distance = look_distance # deg
@@ -187,17 +187,20 @@
         self.clusters = []
         logger.debug("Grouper: kernel_size=%.1f; look_distance=%.1f; grouping_distance=%.2f" % (kernel_size,look_distance,grouping_distance) )

-    def euclid_distance(self, coord, coords):
+    def sky_distance(self, coord, coords):
         """
         Simple distance from coord to all coords
         """
-        return np.sqrt(np.sum((coord - coords)**2, axis=1))
+        #dist1 = np.sqrt(np.sum((coord - coords)**2, axis=1))
+        catalogue = SkyCoord(ra = [c.ra for c in coords], dec = [c.dec for c in coords])
+        dist = np.array([d.deg for d in coord.separation(catalogue)])
+        return dist

     def neighbourhood_points(self, centroid, coords, max_distance):
         """
         Find close points; this reduces the load
         """
-        distances = self.euclid_distance(centroid, coords)
+        distances = self.sky_distance(centroid, coords)
         #print('Evaluating: [%s vs %s] yield dist=%.2f' % (x, x_centroid, distance_between))
         return np.flatnonzero(distances < max_distance)
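Why the great-circle change matters, in a minimal standalone sketch (coordinates invented for illustration): SkyCoord.separation handles the RA 0/360 wrap and the cos(dec) convergence of meridians, both of which the old Euclidean difference over raw (ra, dec) got wrong.

import astropy.units as u
from astropy.coordinates import SkyCoord

# two points 0.5 deg apart in RA at dec=60, straddling the RA wrap
p = SkyCoord(ra=359.75*u.deg, dec=60.0*u.deg, frame='fk5')
q = SkyCoord(ra=0.25*u.deg, dec=60.0*u.deg, frame='fk5')
print(p.separation(q).deg)  # ~0.25 deg: 0.5 deg of RA scaled by cos(60)
# a Euclidean difference over raw (ra, dec) would report ~359.5 deg here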

@@ -206,6 +209,31 @@ def gaussian_kernel(self, distance):
         """
         return (1/(self.kernel_size*np.sqrt(2*np.pi))) * np.exp(-0.5*((distance / self.kernel_size))**2)

+    def weighted_spherical_mean(self, coords_list, weights):
+        """
+        Calculate the weighted spherical mean of a list of SkyCoord objects.
+        Parameters:
+        - coords_list: List of SkyCoord objects.
+        - weights: List of weights corresponding to each coordinate.
+        Returns:
+        - Weighted spherical mean SkyCoord object.
+        """
+        # Convert coordinates to Cartesian representation
+        cartesian_coords = np.array([coord.represent_as('cartesian').xyz.value for coord in coords_list])
+
+        # Apply weights to Cartesian coordinates
+        weighted_cartesian_coords = cartesian_coords * np.array(weights)[:, np.newaxis]
+
+        # Calculate the weighted mean Cartesian coordinates
+        weighted_mean_cartesian = np.sum(weighted_cartesian_coords, axis=0) / np.sum(weights)
+
+        # Convert back to spherical coordinates
+        mean_coord = SkyCoord(x=weighted_mean_cartesian[0], y=weighted_mean_cartesian[1], z=weighted_mean_cartesian[2], representation_type='cartesian').represent_as('spherical')
+
+        return SkyCoord(ra=mean_coord.lon.deg*u.deg, dec=mean_coord.lat.deg*u.deg, frame='fk5')
+
     def run(self):
         """
         Run the algorithm
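A short usage sketch of the Cartesian averaging trick above (all values invented, not part of the commit): weighting unit vectors and converting the sum back to (ra, dec) keeps the mean on the sphere and immune to the RA wrap.

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord

a = SkyCoord(ra=359.0*u.deg, dec=10.0*u.deg, frame='fk5')
b = SkyCoord(ra=1.0*u.deg, dec=10.0*u.deg, frame='fk5')
xyz = np.array([c.represent_as('cartesian').xyz.value for c in (a, b)])
mean = np.average(xyz, axis=0, weights=[1.0, 1.0])  # weighted mean of unit vectors
m = SkyCoord(x=mean[0], y=mean[1], z=mean[2],
             representation_type='cartesian').represent_as('spherical')
print(m.lon.deg, m.lat.deg)  # ~0.0 ~10.0; a naive (359+1)/2 would give ra=180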
@@ -216,27 +244,23 @@ def run(self):
             for i, x in enumerate(self.coords):
                 ### Step 1. For each datapoint x in X, find the neighbouring points N(x) of x.
                 idx_neighbours = self.neighbourhood_points(x, self.coords, max_distance = self.look_distance)

                 ### Step 2. For each datapoint x in X, calculate the mean shift m(x).
-                distances = self.euclid_distance(self.coords[idx_neighbours], x)
+                distances = self.sky_distance(x, self.coords[idx_neighbours])
                 weights = self.gaussian_kernel(distances)
                 weights *= self.fluxes[idx_neighbours]**2 # multiply by flux**2 to make bright sources more important
-                numerator = np.sum(weights[:,np.newaxis] * self.coords[idx_neighbours], axis=0)
-                denominator = np.sum(weights)
-                new_x = numerator / denominator
+                new_x = self.weighted_spherical_mean(self.coords[idx_neighbours], weights)
                 #print("xnewx",x,new_x)

                 ### Step 3. For each datapoint x in X, update x <- m(x).
                 self.coords[i] = new_x

             self.past_coords.append(np.copy(self.coords))

-            #if it>1:
-            #    print (np.max(self.euclid_distance(self.coords,self.past_coords[-2])))
-
             # if things change little, break
-            if it > 1 and np.max(self.euclid_distance(self.coords, self.past_coords[-2])) < self.grouping_distance/2.:
-                break
+            if it > 1:
+                dists = [self.coords[i].separation(self.past_coords[-2][i]).deg for i in range(len(self.coords))]
+                if np.max(dists) < self.grouping_distance/2.:
+                    break

     def grouping(self):
         """
@@ -299,31 +323,29 @@ def plot(self):
         for i, X in enumerate(self.past_coords):
             fig = plt.figure(figsize=(8, 8))
             fig.subplots_adjust(wspace=0)
-            ax = fig.add_subplot(111)
-
-            initial_x = self.past_coords[0][:,0]
-            initial_y = self.past_coords[0][:,1]
+            ax = fig.add_subplot(111, polar=True)
+
+            initial_x = np.array([s.ra.wrap_at(180 * u.deg).rad for s in self.past_coords[0]])
+            initial_y = np.array([np.pi/2 - s.dec.rad for s in self.past_coords[0]])
             ax.plot(initial_x,initial_y,'k.')
-            ax.plot(X[:,0],X[:,1],'ro')
+            ax.plot([s.ra.wrap_at(180 * u.deg).rad for s in X], [np.pi/2-s.dec.rad for s in X],'ro')

-            ax.set_xlim( np.min(initial_x), np.max(initial_x) )
-            ax.set_ylim( np.min(initial_y), np.max(initial_y) )
+            ax.set_rorigin(-1*np.min(initial_y))

-            #print ('Saving plot_%i.png' % i)
+            ax.set_xlim(ax.get_xlim()[::-1]) # reverse RA
             fig.savefig('grouping_%00i.png' % i, bbox_inches='tight')

         # plot clustering
         fig = plt.figure(figsize=(8, 8))
         fig.subplots_adjust(wspace=0)
-        ax = fig.add_subplot(111)
+        ax = fig.add_subplot(111, polar=True)
         for cluster in self.clusters:
             ax.plot(initial_x[cluster],initial_y[cluster], marker='.', linestyle='')

-        ax.set_xlim( np.min(initial_x), np.max(initial_x) )
-        ax.set_ylim( np.min(initial_y), np.max(initial_y) )
+        ax.set_xlim(ax.get_xlim()[::-1]) # reverse RA
+        ax.set_rorigin(-1*np.min(initial_y))

         logger.info('Plotting: grouping_clusters.png')
         fig.savefig('grouping_clusters.png', bbox_inches='tight')
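For reference, a self-contained sketch of the projection the new plot() uses (sources invented): RA becomes the polar angle, wrapped to ±180 deg, and the co-declination pi/2 - dec becomes the radius, with the angular axis reversed to match the east-left sky convention.

import numpy as np
import matplotlib.pyplot as plt
import astropy.units as u
from astropy.coordinates import SkyCoord

srcs = SkyCoord(ra=[10, 12, 15]*u.deg, dec=[44, 45, 46]*u.deg, frame='fk5')
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(111, polar=True)
ax.plot(srcs.ra.wrap_at(180*u.deg).rad, np.pi/2 - srcs.dec.rad, 'k.')
ax.set_xlim(ax.get_xlim()[::-1])  # reverse RA, as in plot() above
fig.savefig('polar_sketch.png', bbox_inches='tight')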
2 changes: 1 addition & 1 deletion LiLF/lib_dd_parallel.py
@@ -1,4 +1,4 @@
-import os, sys, itertools
+import os, sys
 import numpy as np
 from astropy.coordinates import SkyCoord
 from astropy.io import fits as pyfits
2 changes: 1 addition & 1 deletion LiLF/lib_h5.py
@@ -1,6 +1,6 @@
 import os, sys
 import numpy as np
-from losoto.h5parm import h5parm, Soltab
+from losoto.h5parm import h5parm
 from LiLF.lib_log import logger

 def repoint(h5parmFile, dirname, solsetname='sol000'):
59 changes: 58 additions & 1 deletion LiLF/lib_img.py
@@ -8,11 +8,13 @@
 from scipy.ndimage.measurements import label
 from LiLF import make_mask, lib_util
 from LiLF.lib_log import logger
+import astropy.io.fits as fits
+import astropy.wcs as wcs

 class Image(object):
     def __init__(self, imagename, userReg = None, beamReg= None ):
         """
-        userMask: keep this region when making masks
+        userReg: keep this region when making masks
         BeamReg: ds9 region file of the beam
         """
         if 'MFS' in imagename: suffix = '-MFS-image.fits'
@@ -31,6 +33,52 @@ def __init__(self, imagename, userReg = None, beamReg= None ):
         self.userReg = userReg
         self.beamReg = beamReg

+    def calc_flux(self, img, mask):
+        """
+        Get flux inside given region. Adapted from Martin Hardcastle's radiomap class
+        """
+        fitsfile = img
+        extract = mask
+
+        phdu = fits.open(fitsfile)
+        head, lhdu = flatten(phdu)
+        gfactor = 2.0 * np.sqrt(2.0 * np.log(2.0))
+        f = phdu[0]
+        prhd = phdu[0].header
+        units = prhd.get('BUNIT')
+        if units is None:
+            units = prhd.get('UNIT')
+        if units != 'JY/BEAM' and units != 'Jy/beam':
+            print('Warning: units are', units, 'but code expects JY/BEAM')
+        bmaj = prhd.get('BMAJ')
+        bmin = prhd.get('BMIN')
+
+        bmaj = np.abs(bmaj)
+        bmin = np.abs(bmin)
+
+        w = wcs.WCS(prhd)
+        cd1 = -w.wcs.cdelt[0]
+        cd2 = w.wcs.cdelt[1]
+        if ((cd1 - cd2) / cd1) > 1.0001 and ((bmaj - bmin) / bmin) > 1.0001:
+            print('Pixels are not square (%g, %g) and beam is elliptical' % (cd1, cd2))
+
+        bmaj /= cd1
+        bmin /= cd2
+        area = 2.0 * np.pi * (bmaj * bmin) / (gfactor * gfactor)
+
+        d = [lhdu]
+
+        region = pyregion.open(extract).as_imagecoord(prhd)
+
+        for i, n in enumerate(d):
+            mask = region.get_mask(hdu=f, shape=np.shape(n))
+            data = np.extract(mask, n)
+            nndata = data[~np.isnan(data)]
+            flux = np.sum(nndata) / area
+
+        return flux
+
     def rescaleModel(self, funct_flux):
         """
         Rescale the model images to a certain total flux estimated using funct_flux(nu)
@@ -55,7 +103,16 @@ def rescaleModel(self, funct_flux):
             fits.writeto(model_img, overwrite=True)
             fits.close()

+    def nantozeroModel(self):
+        """
+        Set nan to 0 in all model images
+        """
+        for modelimage in sorted(glob.glob(self.root+'*model*.fits')):
+            with pyfits.open(modelimage, mode='update') as fits:
+                for hdu in fits:
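+                    # NaN != NaN, so this mask selects exactly the NaN pixels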
+                    hdu.data[hdu.data != hdu.data] = 0
+
+    # TODO: separate makemask (using breizorro) and makecat (using bdsf)
     def makeMask(self, threshpix=5, atrous_do=False, rmsbox=(100,10), remove_extended_cutoff=0., only_beam=False, maskname=None,
                  write_srl=False, write_gaul=False, write_ds9=False, mask_combine=None):
         """
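A worked check of the beam-area arithmetic in calc_flux above (numbers invented): gfactor = 2*sqrt(2*ln 2) converts the FWHM beam axes to sigmas, the Gaussian beam solid angle is 2*pi*sigma_maj*sigma_min, and the pixel sum in Jy/beam divided by the beam area in pixels gives the integrated flux in Jy.

import numpy as np

gfactor = 2.0 * np.sqrt(2.0 * np.log(2.0))  # FWHM -> sigma, ~2.355
bmaj_pix = 6.0 / 1.5                        # e.g. a 6" FWHM beam on 1.5" pixels
bmin_pix = 6.0 / 1.5
area = 2.0 * np.pi * (bmaj_pix * bmin_pix) / (gfactor * gfactor)  # ~18.1 px/beam
pixel_sum = 181.3                           # sum of Jy/beam values inside a region
print(pixel_sum / area)                     # ~10.0 Jy of integrated flux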
2 changes: 1 addition & 1 deletion LiLF/lib_log.py
@@ -40,7 +40,7 @@ def __init__(self, pipename):# logfile = "pipeline.logging", log_dir = "logs"):
         logger.propagate = False
         logger.handlers = []

-        timestamp = time.strftime('%Y-%m-%d_%H:%M', time.localtime())
+        timestamp = time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime())
         self.logfile = pipename+'_'+timestamp+'.logger'
         self.log_dir = 'logs_'+pipename+'_'+timestamp
         os.makedirs(self.log_dir)
… (the remaining changed files are not shown here)
