Merge pull request #34 from EGaraldi/better-tests
Better tests
EGaraldi authored Sep 3, 2024
2 parents 132978c + 9082c05 commit 50dec9f
Showing 12 changed files with 292 additions and 15 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/python-package.yml
@@ -5,7 +5,7 @@ name: Python package

on:
push:
branches: [ "master" ]
branches: [ "master", "better-tests" ]
pull_request:
branches: [ "master" ]

@@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]

steps:
- uses: actions/checkout@v3
@@ -37,4 +37,4 @@ jobs:
# flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest --disable-pytest-warnings
pytest --disable-pytest-warnings tests
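
The updated workflow runs the suite on Python 3.8–3.12 and points pytest at the new tests/ directory. A minimal sketch of reproducing that invocation from Python rather than the shell (assuming only that pytest is installed; the tests path is the one added in this PR):

```python
import sys

import pytest

# Equivalent to running `pytest --disable-pytest-warnings tests` on the command line,
# which is what the CI job above does for every Python version in the matrix.
exit_code = pytest.main(["--disable-pytest-warnings", "tests"])
sys.exit(int(exit_code))
```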
2 changes: 1 addition & 1 deletion .gitignore
@@ -11,4 +11,4 @@ HOWTO
MANIFEST
corecon/__init__.py.bkp
time_of_last_update.dat

tests/__pycache__
4 changes: 3 additions & 1 deletion corecon/DataEntryClass.py
@@ -50,15 +50,17 @@ def __init__(self,
self.extra_data.append(k)
self.extra_data = np.array(self.extra_data)

#create named entries
#create named entries and dimensions_descriptors_internal
setattr(self, parent_field, values[:])
self.dimensions_descriptors_internal = []
for k, descr in enumerate(dimensions_descriptors):
descr = descr.replace(" ", "_")
descr = ''.join(ch if ch.isalnum() or ch=="_" else '' for ch in descr)
if ndim==1:
setattr(self, descr, axes)
else:
setattr(self, descr, axes[:,k])
self.dimensions_descriptors_internal.append(descr)

def __repr__(self):
"""string describing the class
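
The hunk above sanitizes each dimension descriptor into a valid Python attribute name and records the result in the new dimensions_descriptors_internal list, so later code can look the axes up by attribute. A small, hedged illustration of that name-mangling step in isolation (the example descriptors are invented):

```python
descriptors = ["redshift", "M UV", "halo mass [Msun]"]

internal = []
for descr in descriptors:
    descr = descr.replace(" ", "_")                                           # spaces -> underscores
    descr = ''.join(ch if ch.isalnum() or ch == "_" else '' for ch in descr)  # drop any other symbol
    internal.append(descr)

print(internal)  # ['redshift', 'M_UV', 'halo_mass_Msun']
```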
13 changes: 8 additions & 5 deletions corecon/FieldClass.py
@@ -17,6 +17,10 @@ def __init__(self, *arg, **kw):
self.field_description = None
self.field_units = None
self.field_remarks = None

# return a copy of the item, to preserve the originally-loaded item
#def __getitem__(self, key):
# return copy.deepcopy(super().__getitem__(key))

def __str__(self):
return super().__str__()
@@ -92,7 +96,7 @@ def filter_by_redshift_range(self, zmin, zmax):
dict_zslice.field_description = self.field_description

for k in self.keys():
if not 'redshift' in self[k].dimensions_descriptors:
if not 'redshift' in self[k].dimensions_descriptors_internal:
print("WARNING: missing redshift dimension for entry %s. Skipping it."%(k))
continue

@@ -106,7 +110,7 @@ def filter_by_redshift_range(self, zmin, zmax):
dict_zslice[k].upper_lim = dict_zslice[k].upper_lim[w]
dict_zslice[k].lower_lim = dict_zslice[k].lower_lim[w]
#variables auto-created from "axes"
for dd in self[k].dimensions_descriptors:
for dd in self[k].dimensions_descriptors_internal:
_temp = getattr(dict_zslice[k], dd)
setattr(dict_zslice[k], dd, _temp[w])
#variables auto-created from"values"
@@ -160,7 +164,7 @@ def get_lower_limits(self):
dict_lls[k].upper_lim = dict_lls[k].upper_lim[self[k].lower_lim]
dict_lls[k].lower_lim = dict_lls[k].lower_lim[self[k].lower_lim]
#variables auto-created from "axes"
for dd in self[k].dimensions_descriptors:
for dd in self[k].dimensions_descriptors_internal:
_temp = getattr(dict_lls[k], dd)
setattr(dict_lls[k], dd, _temp[self[k].lower_lim])
#variables auto-created from"values"
@@ -195,7 +199,7 @@ def get_upper_limits(self):
dict_uls[k].upper_lim = dict_uls[k].upper_lim[self[k].upper_lim]
dict_uls[k].lower_lim = dict_uls[k].lower_lim[self[k].upper_lim]
#variables auto-created from "axes"
for dd in self[k].dimensions_descriptors:
for dd in self[k].dimensions_descriptors_internal:
_temp = getattr(dict_uls[k], dd)
setattr(dict_uls[k], dd, _temp[self[k].upper_lim])
#variables auto-created from"values"
@@ -207,4 +211,3 @@ def get_upper_limits(self):
setattr(dict_uls[k], e, _temp[self[k].upper_lim])

return dict_uls
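
All of these selectors now resolve the auto-created axis attributes through dimensions_descriptors_internal instead of the raw descriptor strings. A hedged usage sketch built only on calls exercised by the new tests below (whether the redshift bounds are inclusive is not asserted here):

```python
import corecon as crc

uvlf = crc.get("UVLF")                          # UV luminosity function constraints
uvlf_z = uvlf.filter_by_redshift_range(9, 13)   # keep only points in this redshift range

# Each sliced entry still exposes its sanitized axis attributes, e.g. `redshift`.
for name, entry in uvlf_z.items():
    print(name, entry.redshift.min(), entry.redshift.max())
```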

Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
dictionary_tag = "Livermore et a. 2017"
dictionary_tag = "Livermore et al. 2017"

reference = "Livermore, Finkelstein, Lotz; ApJ. 835, 113 (2017)"

Binary file modified corecon/data/data.zip
7 changes: 6 additions & 1 deletion corecon/loaders.py
@@ -50,7 +50,12 @@ def _expand_field(field, shape):
extra_data = {}
for k in local_var_dict.keys():
extra_data[k] = np.array(local_var_dict[k], dtype=object)

#convert to float whenever possible
try:
extra_data[k] = extra_data[k].astype(np.float64)
except ValueError:
pass

#expand None's, True's, and False's (this will also convert them to array)
err_up = _expand_field(err_up , values.shape)
err_down = _expand_field(err_down , values.shape)
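
The added try/except converts purely numeric extra-data columns to float arrays while leaving mixed or textual columns untouched. A minimal standalone illustration of that pattern (the column names and values are invented for the example):

```python
import numpy as np

extra_data = {
    "quality_flag": np.array(["good", "bad"], dtype=object),  # non-numeric column
    "weight":       np.array(["1.5", "2.0"], dtype=object),   # numeric strings
}

for k in extra_data:
    try:
        extra_data[k] = extra_data[k].astype(np.float64)  # succeeds only if every value parses as a float
    except ValueError:
        pass                                              # otherwise keep the original object array

print(extra_data["weight"].dtype)        # float64
print(extra_data["quality_flag"].dtype)  # object
```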
3 changes: 0 additions & 3 deletions test_import.py

This file was deleted.

Empty file added tests/__init__.py
76 changes: 76 additions & 0 deletions tests/test_DataEntryClass.py
@@ -0,0 +1,76 @@
import corecon as crc
import numpy as np


def test_swap_limits():
uvlf = crc.get("UVLF")
h23 = uvlf["Harikane et al. 2023"]

h23.swap_limits()
assert np.all( h23.lower_lim == [True, True, False, False, False, False, True, True, False, False, False, False, True, False]),\
"Problem detected in DataEntry.swap_limit"

def test_swap_errors():
uvlf = crc.get("UVLF")
b17 = uvlf["Bouwens et al. 2017"]

b17.swap_errors()
assert np.all(b17.err_down == [0.30103 , 0.15970084, 0.19629465, 0.12493874, 0.09691001, 0.07918125, 0.28546222, 0.03817964,
0.29459589, 0.34325599, 0.26787824, 0.35483287, 0.09089366, 0.78861996, 1.14378603, 1.58134989, 2.10047281] ), \
"Problem detected in DataEntry.swap_errors"

def test_nan_to_values_all_fields():
uvlf = crc.get("UVLF")
k22 = uvlf["Kauffmann et al. 2022"]

k22.nan_to_values("all", 1e-10)
assert np.all(k22.err_down == [1.0000000e-10, 3.5139073e-01, 3.4584234e-01, 4.0866387e-01, 5.4104580e-01, 1.0000000e+02]),\
"Problem detected in DataEntry.nan_to_vals using 'all' as first argument"
assert np.all(k22.err_up == [1.0000000e-10, 1.9165904e-01, 1.9005692e-01, 2.0676008e-01, 2.3357745e-01, 3.4987856e-01]),\
"Problem detected in DataEntry.nan_to_vals using 'all' as first argument"

def test_nan_to_values_one_field():
uvlf = crc.get("UVLF")
k22 = uvlf["Kauffmann et al. 2022"]

k22.nan_to_values("err_down", 1e-10)
assert np.all(k22.err_down == [1.0000000e-10, 3.5139073e-01, 3.4584234e-01, 4.0866387e-01, 5.4104580e-01, 1.0000000e+02]),\
"Problem detected in DataEntry.nan_to_vals using a single field as first argument"

def test_nan_to_values_list_fields():
uvlf = crc.get("UVLF")
k22 = uvlf["Kauffmann et al. 2022"]

k22.nan_to_values(["err_down", "err_up"], 1e-10)
assert np.all(k22.err_down == [1.0000000e-10, 3.5139073e-01, 3.4584234e-01, 4.0866387e-01, 5.4104580e-01, 1.0000000e+02]),\
"Problem detected in DataEntry.nan_to_vals using a list of fields as first argument"
assert np.all(k22.err_up == [1.0000000e-10, 1.9165904e-01, 1.9005692e-01, 2.0676008e-01, 2.3357745e-01, 3.4987856e-01]),\
"Problem detected in DataEntry.nan_to_vals using a list of fields as first argument"

def test_set_lim_errors():
uvlf = crc.get("UVLF")
h23 = uvlf["Harikane et al. 2023"]

h23.set_lim_errors(5, frac_of_values=False)
assert np.all(h23.err_up == [5.0000000e+00, 5.0000000e+00, 5.2569252e-01, 5.2542593e-01, 3.4821760e-02, 3.9757000e-03,
5.0000000e+00, 5.0000000e+00, 5.2009033e-01, 3.6845013e-01, 2.9921665e-01, 3.7791135e-01,
5.0000000e+00, 3.6835150e-01]),\
"Problem detected in DataEntry.set_lim_errors using frac_of_values=False"

def test_set_lim_errors_frac():
uvlf = crc.get("UVLF")
h23 = uvlf["Harikane et al. 2023"]

h23.set_lim_errors(0.5, frac_of_values=True)
assert np.all(h23.err_up == [-2.0790076 , -2.05760232, 0.52569252, 0.52542593, 0.03482176, 0.0039757 , -2.616422065,
-2.596910015, 0.52009033, 0.36845013, 0.29921665, 0.37791135, -2.808092315, 0.3683515 ]),\
"Problem detected in DataEntry.set_lim_errors using frac_of_values=True"

def test_list_attributes():
uvlf = crc.get("UVLF")
h23 = uvlf["Harikane et al. 2023"]

assert np.all(h23.list_attributes() == ['ndim', 'description','reference','parent_field','url','dimensions_descriptors',
'extracted','axes','values','err_up','err_down','upper_lim','lower_lim','extra_data',
'err_right','err_left','UV_luminosity_function', 'dimensions_descriptors_internal','redshift','M_UV']),\
"Problem detected in DataEntry.list_attributes"
164 changes: 164 additions & 0 deletions tests/test_FieldClass.py
@@ -0,0 +1,164 @@
import corecon as crc
import numpy as np


def test_get_all_references():
uvlf = crc.get("UVLF")

#We test that *at least* the following references are returned. In this way, we do not have to update this test every time a
# new constraint is added (or a temporary one updated)
references = ['Ishigaki, Kawamata, Ouchi, Oguri, Shimasaku, Ono; ApJ. 854, 73 (2018)',
'McLeod, Donnan, McLure, Dunlop, Magee, Begley, Carnall, et al.; MNRAS 527, 5004 (2024)',
'Atek, Richard, Kneib, Schaerer; MNRAS 479, 5184 (2018)',
'Oesch, Bouwens, Illingworth, Labbe, Smit, Franx, et al.; ApJ. 786, 108 (2014)',
'Morishita, Trenti, Stiavelli, Bradley, Coe, et al.; ApJ. 867, 150 (2018)',
'Perez-Gonzalez, Costantin, Langeroodi, Rinaldi, Annunziatella, et al.; ApJL 951, L1 (2023)',
'Bouwens, Oesch, Labbe, Illingworth, Fazio, et al.; ApJ. 830, 67 (2016)',
'Livermore, Finkelstein, Lotz; ApJ. 835, 113 (2017)',
'Harikane, Ouchi, Oguri, Ono, Nakajima, Isobe, Umeda, Mawatari, Zhang; ApJS 265, 5 (2023)',
'Atek, Richard, Kneib, Jauzac, Schaerer, Clement, et al.; ApJ 800, 18 (2015)',
'Bouwens, Illingworth, Oesch, Naidu, van Leeuwen, Magee, MNRAS 523, 1009 (2023)',
'McLeod, McLure, Dunlop; MNRAS 459, 3812 (2016)',
'Donnan, McLeod, McLure, Dunlop, Carnall, Cullen, Magee; MNRAS 520, 4554 (2023)',
'Finkelstein, Ryan, Papovich, Dickinson, Song, et al.; ApJ. 810, 71 (2015)',
'Stefanon, Labbe, Bouwens, Oesch, Ashby, Caputi, et al.; ApJ. 883, 99 (2019)',
'Bouwens, Illingworth, Oesch, Trenti, Labbe, et al.; ApJ. 803, 34 (2015)',
'Rojas-Ruiz S., Finkelstein S. L., Bagley M. B., Stevans M., Finkelstein K. D., et al., 2020, ApJ, 891, 146',
'McLure, Dunlop, Bowler, Curtis-Lake, Schenker, et al.; MNRAS 432, 2696 (2013)',
'Bouwens, Oesch, Illingworth, Ellis, Stefanon; ApJ. 843, 129 (2017)',
'Bowler, Jarvis, Dunlop, McLure, McLeod, et al.; MNRAS 493, 2059 (2020)',
'Leung, Bagley, Finkelstein, Ferguson, Koekemoer, Perez-Gonzalez, et al.; ApJL 954, L46 (2023)',
'Donnan, McLeod, Dunlop, McLure, Carnall, Begley, Cullen, et al.; MNRAS 518, 6011 (2023)',
'Bouwens, Stefanon, Brammer, Oesch, Herard-Demanche, Illingworth, et al., MNRAS 523, 1936 (2023)',
'Castellano, Dayal, Pentericci, Fontana, Hutter, et al.; ApJL. 818, L3 (2016)',
'Oesch, Bouwens, Illingworth, Labbe, Stefanon; ApJ. 855, 105 (2018)',
'Bouwens, Illingworth, Ellis, Oesch, Stefanon; ApJ 940, 55 (2022)',
'Bowler, Dunlop, McLure, McCracken, Milvang-Jensen, et al.; MNRAS 452, 1817 (2015)'
]

uvlf_ref = uvlf.get_all_references()

assert len(set(references).difference(uvlf_ref))==0, "Problem detected in Field.get_all_references"





def test_get_all_urls():
uvlf = crc.get("UVLF")

#We test that *at least* the following URLs are returned. In this way, we do not have to update this test every time a
# new constraint is added (or a temporary one updated)
urls = ['https://iopscience.iop.org/article/10.3847/1538-4357/aaa544',
'https://academic.oup.com/mnras/article/527/3/5004/7408621',
'https://academic.oup.com/mnras/article/479/4/5184/5050078',
'https://iopscience.iop.org/article/10.1088/0004-637X/786/2/108',
'https://iopscience.iop.org/article/10.3847/1538-4357/aae68c',
'https://iopscience.iop.org/article/10.3847/2041-8213/acd9d0/pdf',
'https://iopscience.iop.org/article/10.3847/0004-637X/830/2/67',
'https://iopscience.iop.org/article/10.3847/1538-4357/835/2/113',
'https://iopscience.iop.org/article/10.3847/1538-4365/acaaa9',
'https://ui.adsabs.harvard.edu/abs/2015ApJ...800...18A/abstract',
'https://ui.adsabs.harvard.edu/abs/2023MNRAS.523.1009B/',
'https://academic.oup.com/mnras/article/459/4/3812/2624050',
'https://ui.adsabs.harvard.edu/abs/2023MNRAS.520.4554D/abstract',
'https://iopscience.iop.org/article/10.1088/0004-637X/810/1/71',
'https://iopscience.iop.org/article/10.3847/1538-4357/ab3792',
'https://iopscience.iop.org/article/10.1088/0004-637X/803/1/34',
'https://iopscience.iop.org/article/10.3847/1538-4357/ab7659',
'https://academic.oup.com/mnras/article/432/4/2696/2907730',
'https://iopscience.iop.org/article/10.3847/1538-4357/aa70a4',
'https://academic.oup.com/mnras/article/493/2/2059/5721544',
'https://iopscience.iop.org/article/10.3847/2041-8213/acf365/pdf',
'https://academic.oup.com/mnras/article/518/4/6011/6849970',
'https://ui.adsabs.harvard.edu/abs/2023MNRAS.523.1036B/abstract',
'https://iopscience.iop.org/article/10.3847/2041-8205/818/1/L3',
'https://iopscience.iop.org/article/10.3847/1538-4357/aab03f',
'https://iopscience.iop.org/article/10.3847/1538-4357/ac86d1',
'https://academic.oup.com/mnras/article/452/2/1817/1068199']

uvlf_urls = uvlf.get_all_urls()

assert len(set(urls).difference(uvlf_urls))==0, "Problem detected in Field.get_all_urls"





def test_filter_by_redshift_range():
uvlf = crc.get("UVLF")

#We test that *at least* the following entries are returned. In this way, we do not have to update this test every time a
# new constraint is added (or a temporary one updated)
entries = ['Ishigaki et al. 2018', 'McLeod et al. 2024', 'Oesch et al. 2014',
'Morishita et al. 2018', 'Perez-Gonzalez et al. 2023',
'Bouwens et al. 2016', 'Harikane et al. 2023',
'Bouwens et al. 2023a', 'McLeod et al. 2016',
'Donnan et al. 2023b', 'Stefanon et al. 2019',
'Bouwens et al. 2015', 'Rojas-Ruiz et al. 2020',
'Bowler et al. 2020', 'Leung et al. 2023', 'Donnan et al. 2023a',
'Bouwens et al. 2023b', 'Oesch et al. 2018', 'Bouwens et al. 2022']

uvlf_zrange = uvlf.filter_by_redshift_range(9,13)

assert len(set(entries).difference(uvlf_zrange))==0, "Problem detected in Field.filter_by_redshift_range (not all entries are returned)"

#then test the slicing is correct
assert np.all(uvlf_zrange['Ishigaki et al. 2018'].redshift == [9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0]), \
"Problem detected in Field.filter_by_redshift_range (slicing of constraints is wrong)"





def test_filter_by_extracted():

uvlf = crc.get("UVLF")

#We test that *at least* the following entries are returned. In this way, we do not have to update this test every time a
# new constraint is added (or a temporary one updated)
entries = ['Ishigaki et al. 2018', 'Atek et al. 2018', 'Atek et al. 2015', 'McLeod et al. 2016', 'Castellano et al. 2015']

uvlf_ex = uvlf.filter_by_extracted(True)

assert len(set(entries).difference(uvlf_ex))==0, "Problem detected in Field.filter_by_extracted (not all entries are returned)"

for e in entries:
assert uvlf_ex[e].extracted == True, "Problem detected in Field.filter_by_extracted (returned wrong entry/slice)"




def test_get_upper_limits():
xHII = crc.get("x_HII")

#We test that *at least* the following entries are returned. In this way, we do not have to update this test every time a
# new constraint is added (or a temporary one updated)
entries = ['Schenker et al. 2014','Sobacchi & Mesinger 2015', 'Schroeder et al. 2013', 'Mortlock et al. 2011', 'Mesinger et al. 2015', 'Mason et al. 2019',
'Tilvi et al. 2014', 'Pentericci et al. 2014', 'Bosman et al. 2022', 'Yang et al. 2020b', 'Robertson et al. 2013', 'Hoag et al. 2019']

xHII_ul = xHII.get_upper_limits()

assert len(set(entries).difference(xHII_ul))==0, "Problem detected in Field.get_upper_limits (not all entries are returned)"

for e in entries:
assert np.all(xHII_ul[e].upper_lim), "Problem detected in Field.get_upper_limits (returned wrong entry/slice)"




def test_get_lower_limits():
xHII = crc.get("x_HII")

#We test that *at least* the following entries are returned. In this way, we do not have to update this test every time a
# new constraint is added (or a temporary one updated)
entries = ['Zhu et al. 2022', 'Ouchi et al. 2010', 'Chornock et al. 2013', 'Totani et al. 2006', 'Nakane et al. 2024',
'Lu et al. 2020', 'McGreer et al. 2011','McGreer et al. 2015']

xHII_ll = xHII.get_lower_limits()

assert len(set(entries).difference(xHII_ll))==0, "Problem detected in Field.get_lower_limits (not all entries are returned)"

for e in entries:
assert np.all(xHII_ll[e].lower_lim), "Problem detected in Field.get_lower_limits (returned wrong entry/slice)"
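
The Field-level selectors tested above return new Field objects restricted to the matching data points; a hedged sketch of that workflow (assuming, as these tests do, that the returned object behaves like a dictionary of entries):

```python
import corecon as crc
import numpy as np

xHII = crc.get("x_HII")            # 'x_HII' is a synonym of the HII_fraction field

upper = xHII.get_upper_limits()    # entries sliced down to their upper limits
lower = xHII.get_lower_limits()    # entries sliced down to their lower limits

for name, entry in upper.items():
    assert np.all(entry.upper_lim), f"{name} should contain only upper limits"
```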

30 changes: 30 additions & 0 deletions tests/test_corecon.py
@@ -0,0 +1,30 @@
import corecon as crc
import numpy as np

def test_get_fields():

#We test that *at least* the following fields are returned. In this way, we do not have to update this test every time a
# new constraint field is added
fields = ['HII_fraction', 'HeIII_fraction', 'Lya_flux_power_spectrum', 'mean_free_path', 'effective_optical_depth_HI_Lya',
'effective_optical_depth_HeII_Lya', 'HeII_to_HI_column_density_ratio', 'quasar_luminosity_function',
'UV_luminosity_function','IGM_temperature_mean_density','optical_depth_CMB','sfrd',
'Lya_spike_galaxy_correlation','mass_stellar_metallicity_relation','mass_gas_metallicity_relation',
'galaxy_main_sequence','UV_slope','ionizing_photons_production_efficiency','HI_photoionization_rate',
'ionizing_photons_emission_rate','reionization_midpoint','UV_luminosity_density']


crc_fields = crc.get_fields()

assert len(set(fields).difference(crc_fields))==0, "Problem detected in CoReCon.get_fields"



def test_get_field_synonyms():
#We test that *at least* the following synonyms are returned. In this way, we do not have to update this test every time a
# new one is added
synonyms = ['ionized_fraction', 'x_HII', 'f_HII', 'x_ion', 'f_ion', 'ion_frac']

crc_syn = crc.get_field_synonyms("HII_fraction")

assert len(set(synonyms).difference(crc_syn))==0, "Problem detected in CoReCon.get_field_synonym"
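
The module-level helpers exercised in tests/test_corecon.py give a quick overview of what CoReCon ships; a minimal usage sketch based on the same two calls:

```python
import corecon as crc

print(crc.get_fields())                        # all available constraint fields
print(crc.get_field_synonyms("HII_fraction"))  # e.g. ['ionized_fraction', 'x_HII', 'f_HII', ...]
```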
