This repository has been archived by the owner on May 17, 2021. It is now read-only.

Somewhat updated to newer sunpy and coordinates #36

Closed
wants to merge 13 commits
37 changes: 19 additions & 18 deletions data_hough_detect.py
@@ -1,4 +1,3 @@

from visualize import visualize
import eitwaveutils
import copy
@@ -8,12 +7,12 @@
from sunpy.time import TimeRange, parse_time
from sunpy.net import hek
import os
import numpy as np


-def main(source_data='.jp2',
+def main(source_data='.fits',
time_range=TimeRange('2011/10/01 09:45:00', '2011/10/01 10:15:59'),
-algorithm='hough', feed_directory='~/Data/eitwave/jp2/20111001/',
-use_pickle='maps.pkl',diff_type='running'):
+algorithm='hough', feed_directory='/home/hayesla/fits/data/',
+use_pickle=None,diff_type='running'):
'''
source_data { jp2 | fits | test }
look for helioviewer JP2 files, FITS files, or load the test data
@@ -35,9 +34,9 @@ def main(source_data='.jp2',
if source_data == 'test':
# where to store those data
maps = test_wave2d()
-elif source_data == '.jp2':
+elif source_data == '.fits':
# where to store those data
-data_storage = "~/Data/eitwave/jp2/"
+data_storage = "/home/hayesla/fits/"

if not os.path.exists(os.path.expanduser(data_storage)):
os.makedirs(os.path.expanduser(data_storage))
@@ -64,7 +63,7 @@ def main(source_data='.jp2',
# Assumes that the necessary files are already present

filelist = eitwaveutils.listdir_fullpath(feed_directory,
-filetype ='jp2')
+filetype ='fits')

#filter to only grab the data files with the source_data extn in the directory
files_tmp = []
@@ -75,16 +74,16 @@

# reduce the number of files to those that happen after the flare has
# started
-files = []
-for f in files_tmp:
-fhv = f.split(os.sep)[-1]
-if eitwaveutils.hv_filename2datetime(fhv) > \
-parse_time(flare['event_starttime']):
-files.append(f)
-print('Number of files :' + str(len(files)))
-if len(files) == 0:
-print('No files found. Returning.')
-return None
+#files = []
+#for f in files_tmp:
+# fhv = f.split(os.sep)[-1]
+# if eitwaveutils.hv_filename2datetime(fhv) > \
+# parse_time(flare['event_starttime']):
+# files.append(f)
+#print('Number of files :' + str(len(files)))
+#if len(files) == 0:
+#print('No files found. Returning.')
+#return None

# Define the transform parameters
# params = eitwaveutils.params(flare='test')
@@ -110,6 +109,8 @@
long_maps.append(m)
maps=long_maps

+for i in range(len(maps)):
+maps[i] = np.log(maps[i] + abs(maps[i].min()) + 1)
# Unravel the maps
new_maps = eitwaveutils.map_unravel(maps, params, verbose=True)

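The substantive change in this file is the new intensity rescaling loop: each accumulated map is offset so its minimum maps to 1, then log-scaled (data_hough_detect2.py below does the same with a square root instead). A minimal sketch of the idea on a plain NumPy array, assuming the maps support ndarray arithmetic as sunpy maps did at the time:

import numpy as np

img = np.array([-120.0, 0.0, 40.0, 2000.0])  # toy intensity values
shifted = img + abs(img.min()) + 1           # offset keeps every value >= 1
compressed = np.log(shifted)                 # log compresses the dynamic range
print(compressed)                            # [0.      4.7958  5.0814  7.6597]

The offset matters because np.log is undefined for the negative values that dark-corrected or difference images can contain.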
199 changes: 199 additions & 0 deletions data_hough_detect2.py
@@ -0,0 +1,199 @@
from visualize import visualize
import eitwaveutils
import copy
import os
import cPickle as pickle
from test_wave2d import test_wave2d
from sunpy.time import TimeRange, parse_time
from sunpy.net import hek
import numpy as np

def main(source_data='.fits',
time_range=TimeRange('2011/10/01 09:45:00', '2011/10/01 10:15:59'),
algorithm='hough', feed_directory='/home/hayesla/fits/data/',
use_pickle=None,diff_type='running'):
'''
source_data { jp2 | fits | test }
look for helioviewer JP2 files, FITS files, or load the test data
respectively

time_range : a TimeRange object
time range that is searched for EUV waves

feed_directory
If set to a string, look in this directory for the data files. Assumes that
all the files found there are relevant to the flare detection.

algorithm : { 'hough' | 'prob_hough' }
algorithm used to find the wave
'''
if feed_directory != None:
feed_directory = os.path.expanduser(feed_directory)

if source_data == 'test':
# where to store those data
maps = test_wave2d()
elif source_data == '.fits':
# where to store those data
data_storage = "/home/hayesla/fits/"

if not os.path.exists(os.path.expanduser(data_storage)):
os.makedirs(os.path.expanduser(data_storage))

# Query the HEK for flare information we need
client = hek.HEKClient()
hek_result = client.query(hek.attrs.Time(time_range.t1, time_range.t2),
hek.attrs.EventType('FL'))
#hek.attrs.FRM.Name == '')
if hek_result is None:
# no flares, no analysis possible
return None

# Flares!
print('Number of flares found = ' + str(len(hek_result)))

for flare in hek_result[10:11]:

if feed_directory is None:
print('Acquiring data for flare')
filelist = eitwaveutils.acquire_data(data_storage, source_data,
flare)
else:
# Assumes that the necessary files are already present

filelist = eitwaveutils.listdir_fullpath(feed_directory,
filetype ='fits')

#filter to only grab the data files with the source_data extn in the directory
files_tmp = []
for f in filelist:
if f.endswith(source_data):
files_tmp.append(f)
files = files_tmp

# reduce the number of files to those that happen after the flare has
# started
#files = []
#for f in files_tmp:
# fhv = f.split(os.sep)[-1]
# if eitwaveutils.hv_filename2datetime(fhv) > \
# parse_time(flare['event_starttime']):
# files.append(f)
#print('Number of files :' + str(len(files)))
#if len(files) == 0:
#print('No files found. Returning.')
#return None

# Define the transform parameters
# params = eitwaveutils.params(flare='test')
params = eitwaveutils.params(flare)

# read in files and accumulate them
if use_pickle != None:
# load in a pickle file of the data
pfile = open(feed_directory + use_pickle, 'rb')
a = pickle.load(pfile)
maps = a[0]
new_maps = a[1]
diffs = a[2]
pfile.close()
else:
maps = eitwaveutils.accumulate(files[0:30], accum=1, nsuper=4,
verbose=True)

#temporary fix for exposure control and S/N changes
long_maps = []
for m in maps:
if m.exposure_time > 2.0:
long_maps.append(m)
maps=long_maps
for i in range(len(maps)):
maps[i] = np.sqrt((maps[i] + abs(maps[i].min())+1))



# Unravel the maps
new_maps = eitwaveutils.map_unravel(maps, params, verbose=True)

#sometimes unravelling maps leads to slight variations in the unraveled
#image dimensions. check dimensions of maps and resample to dimensions
#of first image in sequence if need be.
new_maps = eitwaveutils.check_dims(new_maps)

# calculate the differences
if diff_type == 'base':
diffs=eitwaveutils.map_basediff(new_maps)
else:
diffs = eitwaveutils.map_diff(new_maps)

# save the output
#output = open(feed_directory + 'maps.pkl', 'wb')
##pickle.dump([maps, new_maps, diffs], output, protocol=0)
#output.close()

# Unravel the maps
#new_maps = eitwaveutils.map_unravel(maps, params, verbose=True)

#sometimes unravelling maps leads to slight variations in the unraveled
#image dimensions. check dimensions of maps and resample to dimensions
#of first image in sequence if need be.
#new_maps = eitwaveutils.check_dims(new_maps)

# calculate the differences
#diffs = eitwaveutils.map_diff(new_maps)

#generate persistence maps
#persistence_maps = eitwaveutils.map_persistence(diffs)

#determine the threshold to apply to the difference maps.
#diffs > diff_thresh will be True, otherwise False.
threshold_maps = eitwaveutils.map_threshold(new_maps, factor=0.2)

# transform difference maps into binary maps
binary_maps = eitwaveutils.map_binary(diffs, threshold_maps)

if algorithm == 'hough':
# detection based on the hough transform
detection = eitwaveutils.hough_detect(binary_maps, vote_thresh=10)
elif algorithm == 'prob_hough':
# detection based on the probabilistic hough transform. Takes the
# keywords of the probabilistic hough transform - see the documentation
# of skimage.transform.probabilistic_hough (scikit-image.org)
detection = eitwaveutils.prob_hough_detect(binary_maps, threshold=10)

# Remove areas that are too small or that don't have enough detections
detection = eitwaveutils.cleanup(detection,
size_thresh=50,
inv_thresh=8)

#If there is anything left in 'detection', fit a function to the original
#diffmaps in the region defined by 'detection'. Simplest case: fit a
#Gaussian in the y-direction for some x or range of x.
#eitwaveutils.fit_wavefront should probably take the arguments of fitfunc.
#use 'detection' to guess starting fit parameters?

#get just the positive elements of the difference map. Perform fitting on
#these positive diffmaps.
'''posdiffs = copy.deepcopy(diffs)
for i in range(0, len(diffs)):
temp = diffs[i] < 0
posdiffs[i][temp] = 0

#fit a function to the difference maps in the cases where there has been a
#detection
wavefront = eitwaveutils.fit_wavefront(posdiffs, detection)

#strip out the velocity information from the wavefront fitting
velocity = eitwaveutils.wavefront_velocity(wavefront[0])

#strip out the position and width information from the wavefront fitting
pos_width = eitwaveutils.wavefront_position_and_width(wavefront[0])'''

visualize(detection)

return maps, new_maps, diffs, threshold_maps, binary_maps, detection


if __name__ == '__main__':
main()
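As a usage sketch (paths here are placeholders, and the module name assumes this file is importable as data_hough_detect2):

from sunpy.time import TimeRange
from data_hough_detect2 import main

# run the full pipeline over a custom window; diff_type='base' selects
# base-difference images instead of running differences
maps, new_maps, diffs, threshold_maps, binary_maps, detection = main(
    source_data='.fits',
    time_range=TimeRange('2011/10/01 09:45:00', '2011/10/01 10:15:59'),
    feed_directory='/path/to/fits/data/',  # placeholder path
    use_pickle=None,   # or e.g. 'maps.pkl' to reload cached [maps, new_maps, diffs]
    diff_type='running')

Passing use_pickle skips the accumulation step entirely and unpickles the three map lists from feed_directory instead.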
11 changes: 7 additions & 4 deletions eitwaveutils.py
@@ -25,10 +25,10 @@ def params(flare,**kwargs):
flare_event_coord2 = flare['event_coord2']
elif flare["event_coordunit"] == "arcsec" or flare["event_coordunit"] == "arcseconds":
info = pb0r(flare["event_starttime"])
-flare_coords = convert_hcc_hg(info["sd"] / 60.0,
+flare_coords = convert_hcc_hg(#info["sd"] / 60.0,
info["b0"], info["l0"],
-flare['event_coord1'] / 3600.0,
-flare['event_coord2'] / 3600.0)
+(flare['event_coord1'] / 3600.0),
+(flare['event_coord2'] / 3600.0))
flare_event_coord1 = flare_coords[0]
flare_event_coord2 = flare_coords[1]
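The rewritten call drops the solar semi-diameter term and keeps only the division by 3600.0, which converts the HEK event coordinates from arcseconds to degrees. As plain arithmetic (values illustrative):

event_coord1 = 600.0                      # arcseconds, illustrative value
event_coord1_deg = event_coord1 / 3600.0  # 3600 arcsec per degree -> ~0.167 deg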

@@ -369,7 +369,7 @@ def map_binary(diffs, threshold_maps):

'''

-def hough_detect(diffs, vote_thresh=12):
+def hough_detect(diffs, vote_thresh=15):
""" Use the Hough detection method to detect lines in the data.
With enough lines, you can fill in the wave front."""
detection = []
@@ -383,7 +383,10 @@ def hough_detect(diffs, vote_thresh=12):
indices = (transform > vote_thresh ).nonzero()
distances = d[indices[0]]
theta = theta[indices[1]]
+n = len(indices[1])
+print("Found " + str(n) + " lines.")
# Perform the inverse transform to get a series of rectangular
# images that show where the wavefront is.
# Create a map which is the same as the
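For context on the vote_thresh change, here is a self-contained sketch of the vote-thresholding pattern hough_detect uses, built on skimage.transform.hough_line (the test image and threshold are illustrative, not the module's defaults):

import numpy as np
from skimage.transform import hough_line

img = np.zeros((60, 60), dtype=bool)
img[30, 10:50] = True                   # a single horizontal bright line

transform, theta, d = hough_line(img)   # accumulator, angles, distances
vote_thresh = 15
indices = (transform > vote_thresh).nonzero()
distances = d[indices[0]]               # distances of lines with enough votes
angles = theta[indices[1]]              # corresponding line angles
print("Found " + str(len(indices[1])) + " lines.")

Raising vote_thresh from 12 to 15 simply demands more collinear pixels before a candidate line is accepted.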