diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3804f811..ee90b7ce 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,6 @@ name: ExoCTK CI -on: [push, pull_request] +on: [pull_request] jobs: @@ -12,7 +12,7 @@ jobs: max-parallel: 5 matrix: os: [ubuntu-latest, macos-latest] - python-version: [3.7, 3.8, 3.9] + python-version: ['3.9', '3.10', '3.11'] steps: - uses: actions/checkout@v2 @@ -42,12 +42,7 @@ jobs: echo " " echo "Installing exoctk package" echo " " - python setup.py develop - - # echo " " - # echo "Reinstall numpy to fix numpy.core.multiarray error" - # echo " " - pip install -U numpy + python setup.py develop --no-deps echo " " echo "Testing package installation" diff --git a/.pyup.yml b/.pyup.yml index 0a8a67c3..fea1c329 100644 --- a/.pyup.yml +++ b/.pyup.yml @@ -29,7 +29,7 @@ search: True # default: empty # allowed: list requirements: - - pip_requirements.txt: + - requirements.txt: # update all dependencies and pin them update: all pin: True @@ -38,7 +38,7 @@ requirements: # requires private repo permissions, even on public repos # default: empty assignees: - - bourque + - hover2pi # configure the branch prefix the bot is using # default: pyup- diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..b17bbe97 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,29 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.9" + # You can also specify other tool versions: + # nodejs: "19" + # rust: "1.64" + # golang: "1.19" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally declare the Python 
requirements required to build your docs +python: + install: + - requirements: requirements.txt \ No newline at end of file diff --git a/.rtd-environment.yml b/.rtd-environment.yml deleted file mode 100644 index 28b16e7e..00000000 --- a/.rtd-environment.yml +++ /dev/null @@ -1,21 +0,0 @@ -channels: - - astropy - -dependencies: - - astropy - - matplotlib - - numpy - - scipy - - numba - - h5py - - cython - - bibtexparser - - bokeh - - pandas - - sphinx-astropy - - pip: - - sphinx_rtd_theme - - svo_filters - - sphinx-automodapi - - WTForms - - git+https://github.com/spacetelescope/jwst_gtvt.git diff --git a/ci/setup_conda_env.sh b/ci/setup_conda_env.sh index fab31ae1..b053fd81 100755 --- a/ci/setup_conda_env.sh +++ b/ci/setup_conda_env.sh @@ -1,9 +1,4 @@ #!/bin/bash -echo "Creating base conda environment for Python $PYTHON_VERSION" -conda create --yes --prefix /home/travis/envs python=$PYTHON_VERSION -conda activate /home/travis/envs -conda install sphinx numpy flask - echo "Creating ExoCTK conda environment for Python $PYTHON_VERSION" conda env update -f "env/environment-${PYTHON_VERSION}.yml" || exit 1 export CONDA_ENV=exoctk-$PYTHON_VERSION diff --git a/env/environment-3.10.yml b/env/environment-3.10.yml new file mode 100644 index 00000000..0f456da7 --- /dev/null +++ b/env/environment-3.10.yml @@ -0,0 +1,52 @@ +name: exoctk-3.10 +channels: + - conda-forge + - defaults +dependencies: + - astropy>=5.3.1 + - bokeh>=3.2.2 + - cython>=0.29.35 + - docopt>=0.6.2 + - docutils>=0.16 + - flake8>=6.0.0 + - flask>=2.2.2 + - ipython>=8.12.0 + - jupyter>=1.0.0 + - matplotlib>=3.7.1 + - numpydoc>=1.5.0 + - pandas>=2.0.3 + - paramiko>=2.8.1 + - pip>=23.1.2 + - pytest>=7.3.1 + - python=3.10.11 + - pyyaml>=6.0.0 + - scp>=0.14.1 + - sphinx>=5.0.2 + - sqlalchemy>=1.4.39 + - wtforms>=2.3.3 + - pip: + - asteval>=0.9.31 + - awscli>=1.29.4 + - bandit>=1.7.5 + - batman-package>=2.4.8 + - bibtexparser>=1.4.0 + - boto3>=1.28.5 + - corner>=2.2.2 + - docutils>=0.16 + - flask_wtf>=1.1.1 + - 
h5py>=3.9.0 + - hotsoss>=0.1.9 + - gunicorn>=21.1.0 + - numpy>=1.25.1 + - platon>=5.4 + - pyerfa>=2.0.0 + - pysiaf>=0.20.0 + - pysynphot>=2.0.0 + - pyvo>=1.4.1 + - regions>=0.7 + - scipy>=1.11.1 + - sphinx_astropy>=1.9.1 + - svo-filters>=0.4.4 + - werkzeug==2.0.3 + - jwst_gtvt>=1.0.0 + - astroquery>=0.4.6 diff --git a/env/environment-3.11.yml b/env/environment-3.11.yml new file mode 100644 index 00000000..4206bd96 --- /dev/null +++ b/env/environment-3.11.yml @@ -0,0 +1,52 @@ +name: exoctk-3.11 +channels: + - conda-forge + - defaults +dependencies: + - astropy>=5.3.1 + - bokeh>=3.2.2 + - cython>=0.29.35 + - docopt>=0.6.2 + - docutils>=0.16 + - flake8>=6.0.0 + - flask>=2.2.2 + - ipython>=8.12.0 + - jupyter>=1.0.0 + - matplotlib>=3.7.1 + - numpydoc>=1.5.0 + - pandas>=2.0.3 + - paramiko>=2.8.1 + - pip>=23.1.2 + - pytest>=7.3.1 + - python=3.11.0 + - pyyaml>=6.0.0 + - scp>=0.14.1 + - sphinx>=5.0.2 + - sqlalchemy>=1.4.39 + - wtforms>=2.3.3 + - pip: + - asteval>=0.9.31 + - awscli>=1.29.4 + - bandit>=1.7.5 + - batman-package>=2.4.8 + - bibtexparser>=1.4.0 + - boto3>=1.28.5 + - corner>=2.2.2 + - docutils>=0.16 + - flask_wtf>=1.1.1 + - h5py>=3.9.0 + - hotsoss>=0.1.9 + - gunicorn>=21.1.0 + - numpy>=1.25.1 + - platon>=5.4 + - pyerfa>=2.0.0 + - pysiaf>=0.20.0 + - pysynphot>=2.0.0 + - pyvo>=1.4.1 + - regions>=0.7 + - scipy>=1.11.1 + - sphinx_astropy>=1.9.1 + - svo-filters>=0.4.4 + - werkzeug==2.0.3 + - jwst_gtvt>=1.0.0 + - astroquery>=0.4.6 diff --git a/env/environment-3.7.yml b/env/environment-3.7.yml deleted file mode 100644 index cf408432..00000000 --- a/env/environment-3.7.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: exoctk-3.7 -channels: - - defaults - - http://ssb.stsci.edu/astroconda -dependencies: - - astropy<4.1 - - bokeh=2.4.2 - - boto3 - - cython=0.29.24 - - docopt=0.6.2 - - docutils=0.17.1 - - flake8=3.9.0 - - flask=1.1.2 - - gunicorn=20.1.0 - - h5py=3.6.0 - - ipython=7.29.0 - - jupyter=1.0.0 - - matplotlib=3.1.3 - - numpy=1.19.2 - - numpydoc=1.1.0 - - pandas=1.3.2 - - 
paramiko=2.7.2 - - pip=20.3.3 - - pytest=6.2.3 - - python=3.7.1 - - pyyaml - - scipy=1.6.2 - - scp=0.14.1 - - sphinx=4.2.0 - - sqlalchemy=1.4.27 - - twine=3.4.1 - - wtforms=2.3.3 - - pip: - - asteval==0.9.25 - - awscli - - bandit==1.7.1 - - batman-package==2.4.8 - - bibtexparser==1.2.0 - - corner==2.2.1 - - ddtrace==0.57.0 - - flask_wtf==1.0.0 - - lmfit==1.0.1 - - platon==4.0 - - pysiaf==0.10.0 - - pysynphot==2.0.0 - - PyYAML>=5.4 - - sphinx_astropy==1.6.0 - - svo-filters==0.4.1 - - werkzeug>=2.0 - - git+https://github.com/spacetelescope/jwst_gtvt.git@cd6bc76f66f478eafbcc71834d3e735c73e03ed5 - - git+https://github.com/astropy/astroquery.git@ccc96185beeff86f3a12a31a00a801afcebe1dbe \ No newline at end of file diff --git a/env/environment-3.8.yml b/env/environment-3.8.yml deleted file mode 100644 index f37c13cb..00000000 --- a/env/environment-3.8.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: exoctk-3.8 -channels: - - defaults - - http://ssb.stsci.edu/astroconda -dependencies: - - astropy<4.1 - - bokeh=2.3.3 - - boto3 - - cython=0.29.24 - - docopt=0.6.2 - - docutils=0.17.1 - - flake8=3.9.0 - - flask=1.1.2 - - gunicorn=20.1.0 - - h5py=3.6.0 - - ipython=7.29.0 - - jupyter=1.0.0 - - matplotlib=3.3.4 - - numpy=1.19.2 - - numpydoc=1.1.0 - - pandas=1.3.2 - - paramiko=2.7.2 - - pip=20.3.3 - - pytest=6.2.4 - - python=3.8.5 - - pyyaml - - scipy=1.6.2 - - scp=0.14.1 - - sphinx=4.2.0 - - sqlalchemy=1.4.27 - - twine=3.2.0 - - wtforms=2.3.3 - - pip: - - asteval==0.9.25 - - awscli - - bandit==1.7.1 - - batman-package==2.4.8 - - bibtexparser==1.2.0 - - corner==2.2.1 - - ddtrace==0.57.0 - - flask_wtf==1.0.0 - - lmfit==1.0.2 - - platon==4.0 - - pysiaf==0.10.0 - - pysynphot==2.0.0 - - sphinx_astropy==1.6.0 - - svo-filters==0.4.1 - - werkzeug>=2.0 - - git+https://github.com/spacetelescope/jwst_gtvt.git@cd6bc76f66f478eafbcc71834d3e735c73e03ed5 - - git+https://github.com/astropy/astroquery.git@ccc96185beeff86f3a12a31a00a801afcebe1dbe \ No newline at end of file diff --git 
a/env/environment-3.9.yml b/env/environment-3.9.yml index 0f357fd8..44f0813b 100644 --- a/env/environment-3.9.yml +++ b/env/environment-3.9.yml @@ -1,49 +1,53 @@ name: exoctk-3.9 channels: + - conda-forge - defaults - - http://ssb.stsci.edu/astroconda dependencies: - - astropy=4.2 - - bokeh=2.3.3 - - boto3 - - cython=0.29.22 - - docopt=0.6.2 - - flake8=3.9.0 - - flask=1.1.2 - - h5py=3.6.0 - - ipython=7.29.0 - - jupyter=1.0.0 - - matplotlib=3.3.4 - - numpy=1.19.2 - - numpydoc=1.1.0 - - pandas=1.3.2 - - paramiko=2.7.2 - - pip=20.3.3 - - pytest=6.2.4 + - astropy>=5.3.1 + - bokeh>=3.2.2 + - cython>=0.29.35 + - docopt>=0.6.2 + - docutils>=0.16 + - flake8>=6.0.0 + - flask>=2.2.2 + - ipython>=8.12.0 + - jupyter>=1.0.0 + - matplotlib>=3.7.1 + - numpydoc>=1.5.0 + - pandas>=2.0.3 + - paramiko>=2.8.1 + - pip>=23.1.2 + - pytest>=7.3.1 - python=3.9.1 - - pyyaml - - scipy=1.6.2 - - scp=0.14.1 - - sphinx=4.2.0 - - sqlalchemy=1.4.27 - - twine=3.2.0 - - wtforms=2.3.3 + - pyyaml>=6.0.0 + - scp>=0.14.1 + - sphinx>=5.0.2 + - sqlalchemy>=1.4.39 + - wtforms>=2.3.3 - pip: - - asteval==0.9.25 - - awscli - - bandit==1.7.1 - - batman-package==2.4.8 - - bibtexparser==1.2.0 - - corner==2.2.1 - - ddtrace==0.57.0 - - docutils==0.17.1 - - flask_wtf==1.0.0 - - gunicorn==20.0.4 - - platon==5.3 - - pysiaf==0.10.0 - - pysynphot==2.0.0 - - sphinx_astropy==1.6.0 - - svo-filters==0.4.1 - - werkzeug>=2.0 - - git+https://github.com/spacetelescope/jwst_gtvt.git@cd6bc76f66f478eafbcc71834d3e735c73e03ed5 - - git+https://github.com/astropy/astroquery.git@ccc96185beeff86f3a12a31a00a801afcebe1dbe \ No newline at end of file + - asteval>=0.9.31 + - awscli>=1.29.4 + - bandit>=1.7.5 + - batman-package>=2.4.8 + - bibtexparser>=1.4.0 + - boto3>=1.28.5 + - corner>=2.2.2 + - docutils>=0.16 + - flask_wtf>=1.1.1 + - h5py>=3.9.0 + - hotsoss>=0.1.9 + - gunicorn>=21.1.0 + - markupsafe==2.0.1 + - numpy>=1.25.1 + - platon>=5.4 + - pyerfa>=2.0.0 + - pysiaf>=0.20.0 + - pysynphot>=2.0.0 + - pyvo>=1.4.1 + - regions>=0.7 + - 
scipy>=1.11.1 + - sphinx_astropy>=1.9.1 + - svo-filters>=0.4.4 + - werkzeug==2.0.3 + - jwst_gtvt>=1.0.0 + - astroquery>=0.4.6 \ No newline at end of file diff --git a/exoctk/contam_visibility/__init__.py b/exoctk/contam_visibility/__init__.py index d21dd7d9..7feea2ad 100644 --- a/exoctk/contam_visibility/__init__.py +++ b/exoctk/contam_visibility/__init__.py @@ -6,7 +6,6 @@ from . import ephemeris_old2x from . import visibilityPA from . import contamination_figure -from . import resolve from . import time_extensionsx from . import f_visibilityPeriods from . import quaternionx diff --git a/exoctk/contam_visibility/contamination_figure.py b/exoctk/contam_visibility/contamination_figure.py index 56df13a5..309ac6a7 100755 --- a/exoctk/contam_visibility/contamination_figure.py +++ b/exoctk/contam_visibility/contamination_figure.py @@ -1,14 +1,17 @@ import os import sys +from itertools import groupby, count from astropy.io import fits from bokeh.layouts import gridplot -from bokeh.models import Range1d, LinearColorMapper +from bokeh.models import Range1d, LinearColorMapper, CrosshairTool, HoverTool, Span from bokeh.palettes import PuBu from bokeh.plotting import figure import numpy as np from . 
import visibilityPA as vpa +from ..utils import fill_between + EXOCTK_DATA = os.environ.get('EXOCTK_DATA') if not EXOCTK_DATA: @@ -20,230 +23,140 @@ '"ExoCTK Data Download" button on the ExoCTK website, or by using ' 'the exoctk.utils.download_exoctk_data() function.') TRACES_PATH = None + LAM_FILE = None else: TRACES_PATH = os.path.join(EXOCTK_DATA, 'exoctk_contam', 'traces') + LAM_FILE = os.path.join(TRACES_PATH, 'NIRISS_old', 'lambda_order1-2.txt') -DISP_NIRCAM = 0.001 # microns -LAM0_NIRCAM322W2 = 2.369 -LAM1_NIRCAM322W2 = 4.417 -LAM0_NIRCAM444W = 3.063 -LAM1_NIRCAM444W = 5.111 - - -def contam(cube, instrument, targetName='noName', paRange=[0, 360], - badPAs=np.asarray([]), tmpDir="", fig='', to_html=True): - - lam_file = os.path.join(TRACES_PATH, 'NIRISS', 'lambda_order1-2.txt') - ypix, lamO1, lamO2 = np.loadtxt(lam_file, unpack=True) - - rows, cols = cube.shape[1], cube.shape[2] +disp_nircam = 0.001 # microns +lam0_nircam322w2 = 2.369 +lam1_nircam322w2 = 4.417 +lam0_nircam444w = 3.063 +lam1_nircam444w = 5.111 - PAmin, PAmax = paRange[0], paRange[1] - PA = np.arange(PAmin, PAmax, 1) - # Generate the contam figure - if instrument == 'NIRISS': - contamO1, contamO2 = nirissContam(cube) - elif (instrument == 'NIRCam F322W2') or (instrument == 'NIRCam F444W'): - contamO1 = nircamContam(cube, instrument) - elif instrument == 'MIRI': - contamO1 = miriContam(cube) - - TOOLS = 'pan, box_zoom, crosshair, reset, hover' - - bad_PA_color = '#dddddd' - bad_PA_alpha = 0.7 - dPA = 1 - - # Order 1~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - # Contam plot - if instrument == 'NIRISS': - xlim0 = lamO1.min() - xlim1 = lamO1.max() - elif instrument == 'NIRCam F322W2': - xlim0 = LAM0_NIRCAM322W2 - xlim1 = LAM1_NIRCAM322W2 - elif instrument == 'NIRCam F444W': - xlim0 = LAM0_NIRCAM444W - xlim1 = LAM1_NIRCAM444W - elif instrument == 'MIRI': - xlim0 = 5 - xlim1 = 12 +def nirissContam(cube, paRange=[0, 360], lam_file=LAM_FILE): + """ Generates the contamination figure that will be plotted on 
the website + for NIRISS SOSS. + """ + # Get data from FITS file + if isinstance(cube, str): + hdu = fits.open(cubeName) + cube = hdu[0].data + hdu.close() - ylim0 = PAmin - 0.5 - ylim1 = PAmax + 0.5 - color_mapper = LinearColorMapper(palette=PuBu[8][::-1][2:], - low=-4, high=1) - color_mapper.low_color = 'white' - color_mapper.high_color = 'black' + # Pull out the target trace and cube of neighbor traces + trace1 = cube[0, :, :] + trace2 = cube[1, :, :] + cube = cube[2:, :, :] - orders = 'Orders 1 & 2' if instrument == 'NIRCam' else 'Order 1' - s2 = figure( - tools=TOOLS, width=500, height=500, title='{} {} Contamination with {}'.format( - orders, targetName, instrument), x_range=Range1d( - xlim0, xlim1), y_range=Range1d( - ylim0, ylim1)) - - contamO1 = contamO1 if 'NIRCam' in instrument else contamO1.T - contamO1 = np.fliplr(contamO1) if ( - instrument == 'MIRI') or ( - instrument == 'NIRCam F322W2') else contamO1 - fig_data = np.log10(np.clip(contamO1, 1.e-10, 1.) - ) # [:, :361] # might this - # index have somethig to - # do w the choppiness - # of o1 in all instruments + plotPAmin, plotPAmax = paRange - # Begin plotting ~~~~~~~~~~~~~~~~~~~~~~~~ + # Start calculations + ypix, lamO1, lamO2 = np.loadtxt(lam_file, unpack=True) - s2.image([fig_data], x=xlim0, y=ylim0, dw=xlim1 - xlim0, dh=ylim1 - ylim0, - color_mapper=color_mapper) - s2.xaxis.axis_label = 'Wavelength (um)' - if instrument != 'NIRISS': - s2.yaxis.axis_label = 'Aperture Position Angle (degrees)' + nPA = cube.shape[0] + rows = cube.shape[1] + cols = cube.shape[2] + dPA = 360 // nPA + PA = np.arange(nPA) * dPA - # Add bad PAs - bad_PA_color = '#555555' - bad_PA_alpha = 0.6 - # for ybad0, ybad1 in badPA: - if len(badPAs) > 0: + contamO1 = np.zeros([rows, nPA]) + contamO2 = np.zeros([rows, nPA]) - tops, bottoms, lefts, rights = [], [], [], [] - for idx in range(0, len(badPAs)): - PAgroup = badPAs[idx] - top_idx = np.max(PAgroup) - bot_idx = np.min(PAgroup) + low_lim_col = 20 + high_lim_col = 41 - 
tops.append(top_idx) - bottoms.append(bot_idx) - lefts.append(xlim0) - rights.append(xlim1) + for row in np.arange(rows): + # Contamination for order 1 of target trace + i = np.argmax(trace1[row, :]) + tr = trace1[row, i - low_lim_col:i + high_lim_col] + w = tr / np.sum(tr**2) + ww = np.tile(w, nPA).reshape([nPA, tr.size]) + contamO1[row, :] = np.sum( + cube[:, row, i - low_lim_col:i + high_lim_col] * ww, axis=1) - s2.quad(top=tops, bottom=bottoms, - left=lefts, right=rights, - color=bad_PA_color, alpha=bad_PA_alpha) + # Contamination for order 2 of target trace + if lamO2[row] < 0.6: + continue + i = np.argmax(trace2[row, :]) + tr = trace2[row, i - 20:i + 41] + w = tr / np.sum(tr**2) + ww = np.tile(w, nPA).reshape([nPA, tr.size]) + contamO2[row, :] = np.sum(cube[:, row, i - 20:i + 41] * ww, axis=1) - # Line plot - # ax = 1 if 'NIRCam' in instrument else 0 - channels = cols if 'NIRCam' in instrument else rows - s3 = figure(tools=TOOLS, width=150, height=500, - x_range=Range1d(0, 100), y_range=s2.y_range, title=None) + return contamO1, contamO2 - try: - s3.line(100 * np.sum(contamO1 >= 0.001, axis=1) / channels, PA - dPA / 2, - line_color='blue', legend_label='> 0.001') - s3.line(100 * np.sum(contamO1 >= 0.01, axis=1) / channels, PA - dPA / 2, - line_color='green', legend_label='> 0.01') - except AttributeError: - s3.line(100 * np.sum(contamO1 >= 0.001, axis=1) / channels, PA - dPA / 2, - line_color='blue', legend='> 0.001') - s3.line(100 * np.sum(contamO1 >= 0.01, axis=1) / channels, PA - dPA / 2, - line_color='green', legend='> 0.01') - s3.xaxis.axis_label = '% channels contam.' - s3.yaxis.major_label_text_font_size = '0pt' +def nircamContam(cube, paRange=[0, 360]): + """ Generates the contamination figure that will be plotted on the website + for NIRCam Grism Time Series mode. 
- # ~~~~~~ Order 2 ~~~~~~ - # Contam plot - if instrument == 'NIRISS': - xlim0 = lamO2.min() - xlim1 = lamO2.max() - ylim0 = PA.min() - 0.5 * dPA - ylim1 = PA.max() + 0.5 * dPA - xlim0 = 0.614 - s5 = figure( - tools=TOOLS, - width=500, - height=500, - title='Order 2 {} Contamination with {}'.format( - targetName, - instrument), - x_range=Range1d( - xlim0, - xlim1), - y_range=s2.y_range) - fig_data = np.log10(np.clip(contamO2.T, 1.e-10, 1.))[:, 300:] - s5.image( - [fig_data], - x=xlim0, - y=ylim0, - dw=xlim1 - xlim0, - dh=ylim1 - ylim0, - color_mapper=color_mapper) - # s5.yaxis.major_label_text_font_size = '0pt' - s5.xaxis.axis_label = 'Wavelength (um)' - s5.yaxis.axis_label = 'Aperture Position Angle (degrees)' + Parameters + ---------- + cube : arr or str + A 3D array of the simulated field at every Aperture Position Angle (APA). + The shape of the cube is (361, subY, subX). + or + The name of an HDU .fits file sthat has the cube. - if len(badPAs) > 0: + Returns + ------- + bokeh plot + """ + # Get data from FITS file + if isinstance(cube, str): + hdu = fits.open(cubeName) + cube = hdu[0].data + hdu.close() - tops, bottoms, lefts, rights = [], [], [], [] - for idx in range(0, len(badPAs)): - PAgroup = badPAs[idx] - top_idx = np.max(PAgroup) - bot_idx = np.min(PAgroup) + # Pull out the target trace and cube of neighbor traces + targ = cube[0, :, :] # target star order 1 trace + # neighbor star order 1 and 2 traces in all the angles + cube = cube[1:, :, :] - tops.append(top_idx) - bottoms.append(bot_idx) - lefts.append(xlim0) - rights.append(xlim1) + # Remove background values < 1 as it can blow up contamination + targ = np.where(targ < 1, 0, targ) - s5.quad(top=tops, bottom=bottoms, - left=lefts, right=rights, - color=bad_PA_color, alpha=bad_PA_alpha) + PAmin, PAmax = paRange[0], paRange[1] + PArange = np.arange(PAmin, PAmax, 1) - # Line plot - s6 = figure(tools=TOOLS, width=150, height=500, y_range=s2.y_range, - x_range=Range1d(100, 0), title=None) + nPA, rows, 
cols = cube.shape[0], cube.shape[1], cube.shape[2] - try: - s6.line(100 * np.sum(contamO2 >= 0.001, axis=0) / rows, PA - dPA / 2, - line_color='blue', legend_label='> 0.001') - s6.line(100 * np.sum(contamO2 >= 0.01, axis=0) / rows, PA - dPA / 2, - line_color='green', legend_label='> 0.01') - except AttributeError: - s6.line(100 * np.sum(contamO2 >= 0.001, axis=0) / rows, PA - dPA / 2, - line_color='blue', legend='> 0.001') - s6.line(100 * np.sum(contamO2 >= 0.01, axis=0) / rows, PA - dPA / 2, - line_color='green', legend='> 0.01') + contamO1 = np.zeros([nPA, cols]) - s6.xaxis.axis_label = '% channels contam.' - s6.yaxis.major_label_text_font_size = '0pt' + # the width of the trace (in Y-direction for NIRCam GTS) + peak = targ.max() + low_lim_row = np.where(targ > 0.0001 * peak)[0].min() + high_lim_row = np.where(targ > 0.0001 * peak)[0].max() - if len(badPAs) > 0: + # the length of the trace (in X-direction for NIRCam GTS) + targ_trace_start = np.where(targ > 0.0001 * peak)[1].min() + targ_trace_stop = np.where(targ > 0.0001 * peak)[1].max() - tops, bottoms, lefts, rights = [], [], [], [] - for idx in range(0, len(badPAs)): - PAgroup = badPAs[idx] - top_idx = np.max(PAgroup) - bot_idx = np.min(PAgroup) + # Begin contam calculation at each channel (column) X + for X in np.arange(cols): + if (X < targ_trace_start) or (X > targ_trace_stop): + continue - tops.append(top_idx) - bottoms.append(bot_idx) - lefts.append(0) - rights.append(100) + peakY = np.argmax(targ[:, X]) + TOP, BOT = peakY + high_lim_row, peakY - low_lim_row - s3.quad(top=tops, bottom=bottoms, - left=lefts, right=rights, - color=bad_PA_color, alpha=bad_PA_alpha) - if instrument == 'NIRISS': - s6.quad(top=tops, bottom=bottoms, - left=rights, right=lefts, - color=bad_PA_color, alpha=bad_PA_alpha) + tr = targ[BOT:TOP, X] - # ~~~~~~ Plotting ~~~~~~ + # calculate weights + wt = tr / np.sum(tr**2) + ww = np.tile(wt, nPA).reshape([nPA, tr.size]) - if instrument != 'NIRISS': - fig = gridplot(children=[[s2, 
s3]]) - else: - fig = gridplot(children=[[s6, s5, s2, s3]]) + contamO1[:, X] = np.sum(cube[:, BOT:TOP, X] * ww, axis=1) - return fig # , contamO1 + contamO1 = contamO1[:, targ_trace_start:targ_trace_stop] + return contamO1 def miriContam(cube, paRange=[0, 360]): - """ Generates the contamination figure that will be plotted on the - website for MIRI LRS. + """ Generates the contamination figure that will be plotted on the website + for MIRI LRS. """ # Get data from FITS file if isinstance(cube, str): @@ -258,7 +171,12 @@ def miriContam(cube, paRange=[0, 360]): # Remove background values < 1 as it can blow up contamination targ = np.where(targ < 1, 0, targ) - nPA, rows = cube.shape[0], cube.shape[1] + + PAmin, PAmax = paRange[0], paRange[1] + PArange = np.arange(PAmin, PAmax, 1) + + nPA, rows, cols = cube.shape[0], cube.shape[1], cube.shape[2] + contamO1 = np.zeros([rows, nPA]) # the width of the trace (in Y-direction for NIRCam GTS) @@ -282,11 +200,13 @@ def miriContam(cube, paRange=[0, 360]): # calculate weights wt = tr / np.sum(tr**2) + ww = np.tile(wt, nPA).reshape([nPA, tr.size]) + contamO1[Y, :] = np.sum(cube[:, Y, LEFT:RIGHT] * wt, where=~np.isnan(cube[:, Y, LEFT:RIGHT] * wt), axis=1) - # target = np.sum(cube[0, Y, LEFT:RIGHT], axis=0) + #target = np.sum(cube[0, Y, LEFT:RIGHT], axis=0) # contamO1[Y, :] = np.sum(cube[:, Y, LEFT:RIGHT]*ww, # where=~np.isnan(cube[:, Y, LEFT:RIGHT]), @@ -295,124 +215,166 @@ def miriContam(cube, paRange=[0, 360]): return contamO1 -def nircamContam(cube, instrument, paRange=[0, 360]): - """ Generates the contamination figure that will be plotted on the - website for NIRCam Grism Time Series mode. +def contam(cube, instrument, targetName='noName', paRange=[0, 360], badPAs=[]): - PARAMETERS - ---------- - cube : arr or str - A 3D array of the simulated field at every Aperture Position - Angle (APA). The shape of the cube is (361, subY, subX). - or - The name of an HDU .fits file sthat has the cube. 
+ rows, cols = cube.shape[1], cube.shape[2] - instrument : str - The name of the instrument + what filter is being used. For - NIRCam the options are: 'NIRCam F322W2', 'NIRCam F444W' + PAmin, PAmax = paRange[0], paRange[1] + PA = np.arange(PAmin, PAmax, 1) - RETURNS - ------- - bokeh plot - """ - # Get data from FITS file - if isinstance(cube, str): - hdu = fits.open(cubeName) - cube = hdu[0].data - hdu.close() + # Generate the contam figure + if instrument in ['NIS_SUBSTRIP256', 'NIS_SUBSTRIP96']: + contamO1, contamO2 = nirissContam(cube, lam_file=LAM_FILE) + ypix, lamO1, lamO2 = np.loadtxt(LAM_FILE, unpack=True) + xlim0 = lamO1.min() + xlim1 = lamO1.max() + elif instrument == 'NRCA5_GRISM256_F444W': + contamO1 = nircamContam(cube) + xlim0 = lam0_nircam444w + xlim1 = lam1_nircam444w + elif instrument == 'NRCA5_GRISM256_F322W2': + contamO1 = nircamContam(cube) + xlim0 = lam0_nircam322w2 + xlim1 = lam1_nircam322w2 + elif instrument == 'MIRIM_SLITLESSPRISM': + contamO1 = miriContam(cube) + xlim0 = 5 + xlim1 = 12 - # Pull out the target trace and cube of neighbor traces - targ = cube[0, :, :] # target star order 1 trace - # neighbor star order 1 and 2 traces in all the angles - cube = cube[1:, :, :] + TOOLS = 'pan, box_zoom, reset' + dPA = 1 - # Remove background values < 1 as it can blow up contamination - targ = np.where(targ < 1, 0, targ) - nPA, cols = cube.shape[0], cube.shape[2] - contamO1 = np.zeros([nPA, cols]) + # Order 1~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - # the width of the trace (in Y-direction for NIRCam GTS) - peak = targ.max() - low_lim_row = np.where(targ > 0.0001 * peak)[0].min() - high_lim_row = np.where(targ > 0.0001 * peak)[0].max() + # Contam plot + ylim0 = PAmin - 0.5 + ylim1 = PAmax + 0.5 + color_mapper = LinearColorMapper(palette=PuBu[8][::-1][2:], low=-4, high=1) + color_mapper.low_color = 'white' + color_mapper.high_color = 'black' + width = Span(dimension="width", line_width=1) + height = Span(dimension="height", line_width=1) - # the 
length of the trace (in X-direction for NIRCam GTS) - targ_trace_start = np.where(targ > 0.0001 * peak)[1].min() - targ_trace_stop = np.where(targ > 0.0001 * peak)[1].max() + orders = 'Orders 1 & 2' if instrument.startswith('NRCA') else 'Order 1' + s2 = figure(tools=TOOLS, width=800, height=600, title='{} {} Contamination with {}'.format(orders, targetName, instrument), x_range=Range1d(xlim0, xlim1), y_range=Range1d(ylim0, ylim1)) + o1_crosshair = CrosshairTool(overlay=[width, height]) + s2.add_tools(o1_crosshair) - # Begin contam calculation at each channel (column) X - for X in np.arange(cols): - if (X < targ_trace_start) or (X > targ_trace_stop): - continue + contamO1 = contamO1 if 'NRCA' in instrument else contamO1.T + contamO1 = np.fliplr(contamO1) if (instrument == 'MIRIM_SLITLESSPRISM') or (instrument == 'NRCA5_GRISM256_F322W2') else contamO1 + fig_data = np.log10(np.clip(contamO1, 1.e-10, 1.)) - peakY = np.argmax(targ[:, X]) - TOP, BOT = peakY + high_lim_row, peakY - low_lim_row + # Begin plotting ~~~~~~~~~~~~~~~~~~~~~~~~ - tr = targ[BOT:TOP, X] + s2.image([fig_data], x=xlim0, y=ylim0, dw=xlim1 - xlim0, dh=ylim1 - ylim0, color_mapper=color_mapper) + s2.xaxis.axis_label = 'Wavelength (um)' + if not instrument.startswith('NIS'): + s2.yaxis.axis_label = 'Aperture Position Angle (degrees)' - # calculate weights - wt = tr / np.sum(tr**2) - ww = np.tile(wt, nPA).reshape([nPA, tr.size]) + # Line plot + #ax = 1 if 'NIRCam' in instrument else 0 + channels = cols if 'NRCA' in instrument else rows + s3 = figure(tools=TOOLS, width=150, height=600, x_range=Range1d(0, 100), y_range=s2.y_range, title=None) + s3.add_tools(o1_crosshair) - contamO1[:, X] = np.sum(cube[:, BOT:TOP, X] * ww, axis=1) + try: + s3.line(100 * np.sum(contamO1 >= 0.001, axis=1) / channels, PA - dPA / 2, line_color='blue', legend_label='> 0.001') + s3.line(100 * np.sum(contamO1 >= 0.01, axis=1) / channels, PA - dPA / 2, line_color='green', legend_label='> 0.01') + except AttributeError: + s3.line(100 
* np.sum(contamO1 >= 0.001, axis=1) / channels, PA - dPA / 2, line_color='blue', legend='> 0.001') + s3.line(100 * np.sum(contamO1 >= 0.01, axis=1) / channels, PA - dPA / 2, line_color='green', legend='> 0.01') - contamO1 = contamO1[:, targ_trace_start:targ_trace_stop] - return contamO1 + s3.xaxis.axis_label = '% channels contam.' + s3.yaxis.major_label_text_font_size = '0pt' + s3.ygrid.grid_line_color = None + # Add shaded region for bad PAs + bad_PA_color = '#555555' + bad_PA_alpha = 0.6 + if len(badPAs) > 0: -def nirissContam(cube, paRange=[0, 360]): - """ Generates the contamination figure that will be plotted on the - website for NIRISS SOSS. - """ - # Get data from FITS file - if isinstance(cube, str): - hdu = fits.open(cubeName) - cube = hdu[0].data - hdu.close() + # Group bad PAs + badPA_groups = [list(map(int, g)) for _, g in groupby(badPAs, lambda n, c=count(): n-next(c))] - # Pull out the target trace and cube of neighbor traces - trace1 = cube[0, :, :] - trace2 = cube[1, :, :] - cube = cube[2:, :, :] + tops, bottoms, lefts, rights, lefts_line, rights_line = [], [], [], [], [], [] + for idx in range(0, len(badPA_groups)): + PAgroup = badPA_groups[idx] + top_idx = np.max(PAgroup) + bot_idx = np.min(PAgroup) - plotPAmin, plotPAmax = paRange + tops.append(top_idx) + bottoms.append(bot_idx) + lefts.append(xlim0) + rights.append(xlim1) + lefts_line.append(0) + rights_line.append(100) - # Start calculations - if not TRACES_PATH: - return None - lam_file = os.path.join(TRACES_PATH, 'NIRISS', 'lambda_order1-2.txt') - ypix, lamO1, lamO2 = np.loadtxt(lam_file, unpack=True) + s2.quad(top=tops, bottom=bottoms, left=lefts, right=rights, color=bad_PA_color, alpha=bad_PA_alpha) + s3.quad(top=tops, bottom=bottoms, left=lefts_line, right=rights_line, color=bad_PA_color, alpha=bad_PA_alpha) - nPA = cube.shape[0] - rows = cube.shape[1] - cols = cube.shape[2] - print('cols ', cols) + # ~~~~~~ Order 2 ~~~~~~ - contamO1 = np.zeros([rows, nPA]) - contamO2 = np.zeros([rows, 
nPA]) + # Contam plot + if instrument.startswith('NIS'): + xlim0 = lamO2.min() + xlim1 = lamO2.max() + ylim0 = PA.min() - 0.5 * dPA + ylim1 = PA.max() + 0.5 * dPA + xlim0 = 0.614 + s5 = figure(tools=TOOLS, width=800, height=600, title='Order 2 {} Contamination with {}'.format(targetName, instrument), x_range=Range1d(xlim0, xlim1), y_range=s2.y_range) + fig_data = np.log10(np.clip(contamO2.T, 1.e-10, 1.))[:, 300:] + s5.image([fig_data], x=xlim0, y=ylim0, dw=xlim1 - xlim0, dh=ylim1 - ylim0, color_mapper=color_mapper) + s5.xaxis.axis_label = 'Wavelength (um)' + s5.yaxis.axis_label = 'Aperture Position Angle (degrees)' + o2_crosshair = CrosshairTool(overlay=[width, height]) + s5.add_tools(o2_crosshair) - low_lim_col = 20 - high_lim_col = 41 + # Line plot + s6 = figure(tools=TOOLS, width=150, height=600, y_range=s2.y_range, x_range=Range1d(0, 100), title=None) + s6.add_tools(o2_crosshair) - for row in np.arange(rows): - # Contamination for order 1 of target trace - i = np.argmax(trace1[row, :]) - tr = trace1[row, i - low_lim_col:i + high_lim_col] - w = tr / np.sum(tr**2) - ww = np.tile(w, nPA).reshape([nPA, tr.size]) - contamO1[row, :] = np.sum( - cube[:, row, i - low_lim_col:i + high_lim_col] * ww, axis=1) + try: + s6.line(100 * np.sum(contamO2 >= 0.001, axis=0) / rows, PA - dPA / 2, line_color='blue', legend_label='> 0.001') + s6.line(100 * np.sum(contamO2 >= 0.01, axis=0) / rows, PA - dPA / 2, line_color='green', legend_label='> 0.01') + except AttributeError: + s6.line(100 * np.sum(contamO2 >= 0.001, axis=0) / rows, PA - dPA / 2, line_color='blue', legend='> 0.001') + s6.line(100 * np.sum(contamO2 >= 0.01, axis=0) / rows, PA - dPA / 2, line_color='green', legend='> 0.01') - # Contamination for order 2 of target trace - if lamO2[row] < 0.6: - continue - i = np.argmax(trace2[row, :]) - tr = trace2[row, i - 20:i + 41] - w = tr / np.sum(tr**2) - ww = np.tile(w, nPA).reshape([nPA, tr.size]) - contamO2[row, :] = np.sum(cube[:, row, i - 20:i + 41] * ww, axis=1) + 
s6.xaxis.axis_label = '% channels contam.' + s6.yaxis.major_label_text_font_size = '0pt' + s6.ygrid.grid_line_color = None - return contamO1, contamO2 + # Add shaded region for bad PAs + if len(badPAs) > 0: + + # Group bad PAs + badPA_groups = [list(map(int, g)) for _, g in groupby(badPAs, lambda n, c=count(): n - next(c))] + + tops, bottoms, lefts, rights, lefts_line, rights_line = [], [], [], [], [], [] + for idx in range(0, len(badPA_groups)): + PAgroup = badPA_groups[idx] + top_idx = np.max(PAgroup) + bot_idx = np.min(PAgroup) + + tops.append(top_idx) + bottoms.append(bot_idx) + lefts.append(xlim0) + rights.append(xlim1) + lefts_line.append(0) + rights_line.append(100) + + s5.quad(top=tops, bottom=bottoms, left=lefts, right=rights, color=bad_PA_color, alpha=bad_PA_alpha) + s6.quad(top=tops, bottom=bottoms, left=lefts_line, right=rights_line, color=bad_PA_color, + alpha=bad_PA_alpha) + + # ~~~~~~ Plotting ~~~~~~ + + if instrument.startswith('NIS'): + fig = gridplot(children=[[s2, s3], [s5, s6]]) + else: + fig = gridplot(children=[[s2, s3]]) + + return fig # , contamO1 if __name__ == "__main__": diff --git a/exoctk/contam_visibility/field_simulator.py b/exoctk/contam_visibility/field_simulator.py index 71821751..cee46c7e 100755 --- a/exoctk/contam_visibility/field_simulator.py +++ b/exoctk/contam_visibility/field_simulator.py @@ -1,753 +1,1089 @@ +from copy import copy +from functools import partial import glob +from multiprocessing import pool, cpu_count import os -import pysiaf +import re +import time +from pkg_resources import resource_filename import astropy.coordinates as crd from astropy.io import fits from astroquery.irsa import Irsa import astropy.units as u +from astropy.stats import sigma_clip +from astropy.table import Table +from astropy.coordinates import SkyCoord +from astroquery.irsa import Irsa +from astroquery.vizier import Vizier +from astroquery.xmatch import XMatch +from astroquery.gaia import Gaia +from astropy.io import fits +from 
bokeh.plotting import figure, show +from bokeh.layouts import gridplot, column +from bokeh.models import Range1d, LinearColorMapper, LogColorMapper, Label, ColorBar, ColumnDataSource, HoverTool, Slider, CustomJS, VArea, CrosshairTool, TapTool, OpenURL, Span, Legend +from bokeh.palettes import PuBu, Spectral6 +from bokeh.transform import linear_cmap +from hotsoss.plotting import plot_frame +from hotsoss.locate_trace import trace_polynomial +from scipy.ndimage.interpolation import rotate import numpy as np -from pysiaf.utils import rotations -from scipy.io import readsav +import pysiaf +import regions + +from ..utils import get_env_variables, check_for_data +# from .visibilityPA import using_gtvt +from .new_vis_plot import build_visibility_plot, get_exoplanet_positions +from .contamination_figure import contam + +Vizier.columns = ["**", "+_r"] +Gaia.MAIN_GAIA_TABLE = "gaiaedr3.gaia_source" # DR2 is default catalog +Gaia.ROW_LIMIT = 100 from exoctk import utils -EXOCTK_DATA = os.environ.get('EXOCTK_DATA') +APERTURES = {'NIS_SOSSFULL': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[0, 0, 2048, 2048], 'trim': [127, 126, 252, 1]}, + 'NIS_SUBSTRIP96': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[1792, 1792, 1888, 1888], 'trim': [47, 46, 0, 1]}, + 'NIS_SUBSTRIP256': {'inst': 'NIRISS', 'full': 'NIS_SOSSFULL', 'scale': 0.065, 'rad': 2.5, 'lam': [0.8, 2.8], 'subarr_x': [0, 2048, 2048, 0], 'subarr_y':[1792, 1792, 2048, 2048], 'trim': [127, 126, 0, 1]}, + 'NRCA5_GRISM256_F277W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [2.395, 3.179], 'trim': [0, 1, 0, 1]}, + 'NRCA5_GRISM256_F322W2': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [2.413, 4.083], 'trim': [0, 1, 0, 1]}, + 'NRCA5_GRISM256_F356W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 
0.063, 'rad': 2.5, 'lam': [3.100, 4.041], 'trim': [0, 1, 0, 1]}, + 'NRCA5_GRISM256_F444W': {'inst': 'NIRCam', 'full': 'NRCA5_FULL', 'scale': 0.063, 'rad': 2.5, 'lam': [3.835, 5.084], 'trim': [0, 1, 1250, 1]}, + 'MIRIM_SLITLESSPRISM': {'inst': 'MIRI', 'full': 'MIRIM_FULL', 'scale': 0.11, 'rad': 2.0, 'lam': [5, 12], 'trim': [6, 5, 0, 1]}} -# If this is not checked, base ExoCTK does not go through. If None, -# users are warned by the import that contam_visibility will not work -# already in utils.py. -if EXOCTK_DATA is not None: +# Gaia color-Teff relation +GAIA_TEFFS = np.asarray(np.genfromtxt(resource_filename('exoctk', 'data/contam_visibility/predicted_gaia_colour.txt'), unpack=True)) - TRACES_PATH = os.path.join(os.environ.get('EXOCTK_DATA'), 'exoctk_contam', 'traces') +def SOSS_trace_mask(aperture, radius=20): + """ + Construct a trace mask for SOSS data + Parameters + ---------- + radius: int + The radius in pixels of the trace -def fieldSim(ra, dec, instrument, binComp='', testing=False): - """ Wraps ``sossFieldSim``, ``gtsFieldSim``, and ``lrsFieldSim`` - together. Produces a field simulation for a target using any - instrument (NIRISS, NIRCam, or MIRI). + Returns + ------- + np.ndarray + The SOSS trace mask + """ + # TODO: Implement order 3 trace in hotsoss.locate_trace.trace_polynomial then enable it here. 
+ traces = trace_polynomial(evaluate=True) + ydim = APERTURES[aperture]['subarr_y'][2] - APERTURES[aperture]['subarr_y'][1] + mask1 = np.zeros((ydim, 2048)) + mask2 = np.zeros((ydim, 2048)) + mask3 = np.zeros((ydim, 2048)) + for col in np.arange(2048): + mask1[int(traces[0][col]) - radius: int(traces[0][col]) + radius, col] = 1 + mask2[int(traces[1][col]) - radius: int(traces[1][col]) + radius, col] = 1 + mask3[int(traces[2][col]) - radius: int(traces[2][col]) + radius, col] = 1 + + # Right referecnce pixels + mask1[:, :4] = 0 + + # Left reference pixels + mask1[:, -4:] = 0 + mask2[:, :4] = 0 + mask3[:, :4] = 0 + + # Top reference pixels + mask1[-5:, :] = 0 + mask2[-5:, :] = 0 + mask3[-5:, :] = 0 + + # Order 3 cutoff + mask3[:, 823:] = 0 + + return mask1, mask2, mask3 + + +def trace_dict(teffs=None): + """ + Load the trace data for all the given Teff values into a dictionary Parameters ---------- - ra : float - The RA of the target. - dec : float - The Dec of the target. - instrument : str - The instrument the contamination is being calculated for. - Can either be (case-sensitive): - 'NIRISS', 'NIRCam F322W2', 'NIRCam F444W', 'MIRI' - binComp : sequence - The parameters of a binary companion. - testing : bool - Shoud be ``True`` if running fieldSim for testing / - troubleshooting purposes. This will generate a matplotlib figure - showing the target FOV. The neighboring stars in this FOV will - be included in the contamination calculation (contamFig.py). + teffs: sequence + The teff values to fetch Returns ------- - simuCube : np.ndarray - The simulated data cube. Index 0 and 1 (axis=0) show the trace - of the target for orders 1 and 2 (respectively). Index 2-362 - show the trace of the target at every position angle (PA) of the - instrument. - plt.plot() : matplotlib object - A plot. Only if `testing` parameter is set to True. 
+ dict + The trace data for the given Teff values """ - utils.check_for_data('exoctk_contam') + teff_dict = {} - # Calling the variables which depend on what instrument you use - if instrument == 'NIRISS': - simuCube = sossFieldSim(ra, dec, binComp) + if teffs is None: + teffs = np.arange(2000, 12100, 100) - elif instrument == 'NIRCam F444W': - simuCube = gtsFieldSim(ra, dec, 'F444W', binComp) + # Make sure they're ints + teffs = [int(teff) for teff in teffs] - elif instrument == 'NIRCam F322W2': - simuCube = gtsFieldSim(ra, dec, 'F322W2', binComp) + for teff in teffs: + teff_dict[teff] = get_trace('NIS_SUBSTRIP256', teff, verbose=False) - elif instrument == 'MIRI': - simuCube = lrsFieldSim(ra, dec, binComp) + return teff_dict - return simuCube +def find_stars(ra, dec, width=5*u.arcmin, catalog='Gaia', verbose=False): + """ + Find all the stars in the vicinity and estimate temperatures -def gtsFieldSim(ra, dec, filter, binComp=''): - """ Produce a Grism Time Series field simulation for a target. Parameters ---------- ra : float - The RA of the target. + The RA of the target in decimal degrees dec : float - The Dec of the target. - filter : str - The NIRCam filter being used. Can either be: - 'F444W' or 'F322W2' (case-sensitive) - binComp : sequence - The parameters of a binary companion. + The Dec of the target in decimal degrees + width: astropy.units.quantity + The width of the square search box Returns ------- - simuCube : np.ndarray - The simulated data cube. Index 0 and 1 (axis=0) show the trace - of the target for orders 1 and 2 (respectively). Index 2-362 - show the trace of the target at every position angle (PA) of the - instrument. 
+ astropy.table.Table + The table of stars """ - # Instantiate a pySIAF object - siaf = pysiaf.Siaf('NIRCam') + # Converting to degrees and query for neighbors with 2MASS IRSA's fp_psc (point-source catalog) + targetcrd = crd.SkyCoord(ra=ra, dec=dec, unit=u.deg if isinstance(ra, float) and isinstance(dec, float) else (u.hour, u.deg)) - full = siaf.apertures['NRCA5_FULL'] - if filter == 'F444W': - aper = siaf.apertures['NRCA5_GRISM256_F444W'] - elif filter == 'F322W2': - aper = siaf.apertures['NRCA5_GRISM256_F322W2'] + # Search Gaia for stars + if catalog == 'Gaia': - # Calling the variables - deg2rad = np.pi / 180 - subX, subY = aper.XSciSize, aper.YSciSize - rad = 2.5 # arcmins - V3PAs = np.arange(0, 360, 1) - nPA = len(V3PAs) - # Generate cube of field simulation at every degree of APA rotation - simuCube = np.zeros([nPA + 1, subY, subX]) - xSweet, ySweet = aper.reference_point('det') - add_to_v3pa = aper.V3IdlYAngle - # NIRCam Full Frame dimensions - rows, cols = full.corners('det') - minrow, maxrow = rows.min(), rows.max() - mincol, maxcol = cols.min(), cols.max() - - # STEP 1 - # Converting to degrees - targetcrd = crd.SkyCoord(ra=ra, dec=dec, unit=(u.hour, u.deg)) - targetRA = targetcrd.ra.value - targetDEC = targetcrd.dec.value - - # Querying for neighbors with 2MASS IRSA's fp_psc (point-source catalog) - info = Irsa.query_region(targetcrd, catalog='fp_psc', spatial='Cone', - radius=rad * u.arcmin) - - # Coordinates of all the stars in FOV, including target - allRA = info['ra'].data.data - allDEC = info['dec'].data.data - - # Initiating a dictionary to hold all relevant star information - stars = {} - stars['RA'], stars['DEC'] = allRA, allDEC - - # STEP 2 - sindRA = (targetRA - stars['RA']) * np.cos(targetDEC) - cosdRA = targetDEC - stars['DEC'] - distance = np.sqrt(sindRA ** 2 + cosdRA ** 2) - if np.min(distance) > 1.0 * (10 ** -4): - coords = crd.SkyCoord(ra=ra, dec=dec, unit=(u.hour, u.deg)).to_string('decimal') - ra, dec = coords.split(' ')[0], 
coords.split(' ')[1] - raise Exception('Unable to detect a source with coordinates [RA: {}, DEC: {}] within IRSA`s 2MASS Point-Source Catalog. Please enter different coordinates or contact the JWST help desk.'.format(str(ra), str(dec))) - - targetIndex = np.argmin(distance) - - # Restoring model parameters - modelParam = readsav(os.path.join(TRACES_PATH, 'NIRISS', 'modelsInfo.sav'), - verbose=False) - jhMod = modelParam['jhmod'] - hkMod = modelParam['hkmod'] - teffMod = modelParam['teffmod'] - - # STEP 3 - # JHK bands of all stars in FOV, including target - Jmag = info['j_m'].data.data - Hmag = info['h_m'].data.data - Kmag = info['k_m'].data.data - # J-H band, H-K band. This will be used to derive the Teff - J_Hobs = Jmag - Hmag - H_Kobs = Hmag - Kmag - - # Add any missing companion - if binComp != '': - bb = binComp[0] / 3600 / np.cos(allDEC[targetIndex] * deg2rad) - allRA = np.append(allRA, (allRA[targetIndex] + bb)) - allDEC = np.append(allDEC, (allDEC[targetIndex] + binComp[1] / 3600)) - Jmag = np.append(Jmag, binComp[2]) - Hmag = np.append(Kmag, binComp[3]) - Kmag = np.append(Kmag, binComp[4]) - J_Hobs = Jmag - Hmag - H_Kobs = Hmag - Kmag - - # Number of stars - nStars = stars['RA'].size - - # Find/assign Teff of each star - starsT = np.empty(nStars) - for j in range(nStars): - color_separation = (J_Hobs[j] - jhMod)**2 + (H_Kobs[j] - hkMod)**2 - min_separation_ind = np.argmin(color_separation) - starsT[j] = teffMod[min_separation_ind] - - # Record keeping - stars['Temp'] = starsT - stars['Jmag'] = Jmag - - # STEP 4 - # Calculate corresponding V2/V3 (TEL) coordinates for Sweetspot - v2targ, v3targ = aper.det_to_tel(xSweet, ySweet) - - for V3PA in range(0, nPA, 1): - # Get APA from V3PA - APA = V3PA + add_to_v3pa - if APA > 360: - APA = APA - 360 - elif APA < 0: - APA = APA + 360 - - print('Generating field at APA : {}'.format(str(APA))) - - # Get target's attitude matrix for each Position Angle - attitude = rotations.attitude_matrix(v2targ, v3targ, - targetRA, 
targetDEC, - APA) - - xdet, ydet = [], [] - xsci, ysci = [], [] - for starRA, starDEC in zip(stars['RA'], stars['DEC']): - # Get the TEL coordinates of each star w attitude matrix - V2, V3 = rotations.sky_to_tel(attitude, starRA, starDEC) - # Convert to arcsec and turn to a float - V2, V3 = V2.to(u.arcsec).value, V3.to(u.arcsec).value - - XDET, YDET = aper.tel_to_det(V2, V3) - XSCI, YSCI = aper.det_to_sci(XDET, YDET) - - xdet.append(XDET) - ydet.append(YDET) - xsci.append(XSCI) - ysci.append(YSCI) - - # Record keeping - stars['xdet'], stars['ydet'] = np.array(xdet), np.array(ydet) - stars['xsci'], stars['ysci'] = np.array(xsci), np.array(ysci) - - sci_targx, sci_targy = stars['xsci'][targetIndex],\ - stars['ysci'][targetIndex] - - # STEP 5 - inFOV = [] - for star in range(0, nStars): - - x, y = stars['xdet'][star], stars['ydet'][star] - if (mincol < x) & (x < maxcol) & (minrow < y) & (y < maxrow): - inFOV.append(star) - - inFOV = np.array(inFOV) - - # STEP 6 - nircam_path = 'NIRCam_F444W' if filter == 'F444W' else 'NIRCam_F322W2' - fitsFiles = glob.glob( - os.path.join( - TRACES_PATH, - nircam_path, - 'rot*.fits')) - fitsFiles = np.sort(fitsFiles) - - for idx in inFOV: - - sci_dx = round(sci_targx - stars['xsci'][idx]) - sci_dy = round(sci_targy - stars['ysci'][idx]) - temp = stars['Temp'][idx] - - for file in fitsFiles: - if str(temp) in file: - trace = fits.getdata(file, 1)[0] - - fluxscale = 10.0**(-0.4 * (stars['Jmag'][idx] - stars['Jmag'][targetIndex])) - - # Padding array - pad_trace = np.pad(trace, pad_width=5000, mode='constant', - constant_values=0) - - # Determine the highest pixel value of trace - maxY, maxX = np.where(pad_trace == pad_trace.max()) - peakY, peakX = maxY[0], maxX[0] - - # Use relative distances (sci_dx, sci_dy) to find target - xTarg = peakX + sci_dx - yTarg = peakY + sci_dy - - # Use the (xTarg, yTarg) coordinates to slice out subarray - # remember X is columns, Y is rows - dimX0, dimX1 = xTarg - sci_targx, xTarg + subX - sci_targx - 
dimY0, dimY1 = yTarg - sci_targy, yTarg + subY - sci_targy - - if dimX0 < 0: - dimX0 = 0 - dimX1 = subX - if dimY0 < 0: - dimY0 = 0 - dimY1 = subY - - traceX, traceY = np.shape(pad_trace)[1], np.shape(pad_trace)[0] - if dimX1 > traceX: - dimX1 = traceX - dimX0 = traceX - subX - if dimY1 > traceY: - dimY1 = traceY - dimY0 = traceY - subY - - if (dimX1 < 0) or (dimY1 < 0): - continue - - # -1 because pySIAF is 1-indexed - mx0, mx1 = int(dimX0) - 1, int(dimX1) - 1 - my0, my1 = int(dimY0) - 1, int(dimY1) - 1 + if verbose: + print('Searching {} Catalog to find all stars within {} of RA={}, Dec={}...'.format(catalog, width, ra, dec)) + + stars = Gaia.query_object_async(coordinate=targetcrd, width=width, height=width) + + # Derived from K. Volk + # TODO: What to do for sources with no bp-rp color? Uses 2300K if missing. + stars['Teff'] = [GAIA_TEFFS[0][(np.abs(GAIA_TEFFS[1] - row['bp_rp'])).argmin()] for row in stars] + + # Calculate relative flux + stars['fluxscale'] = stars['phot_g_mean_flux'] / stars['phot_g_mean_flux'][0] + + # Star names + stars['name'] = [str(i) for i in stars['source_id']] + + # Catalog name + cat = 'I/350/gaiaedr3' + + # Search 2MASS + elif catalog == '2MASS': + stars = Irsa.query_region(targetcrd, catalog='fp_psc', spatial='Cone', radius=width) + + jhMod = np.array([0.545, 0.561, 0.565, 0.583, 0.596, 0.611, 0.629, 0.642, 0.66, 0.679, 0.696, 0.71, 0.717, 0.715, 0.706, 0.688, 0.663, 0.631, 0.601, 0.568, 0.537, 0.51, 0.482, 0.457, 0.433, 0.411, 0.39, 0.37, 0.314, 0.279]) + hkMod = np.array([0.313, 0.299, 0.284, 0.268, 0.257, 0.247, 0.24, 0.236, 0.229, 0.217,0.203, 0.188, 0.173, 0.159, 0.148, 0.138, 0.13, 0.123, 0.116, 0.112, 0.107, 0.102, 0.098, 0.094, 0.09, 0.086, 0.083, 0.079, 0.07, 0.067]) + teffMod = np.array([2800, 2900, 3000, 3100, 3200, 3300, 3400, 3500, 3600, 3700, 3800, 3900, 4000, 4100, 4200, 4300, 4400, 4500, 4600, 4700, 4800, 4900, 5000, 5100, 5200, 5300, 5400, 5500, 5800, 6000]) + + # Make sure colors are calculated + stars['j_h'] = 
stars['j_m'] - stars['h_m'] + stars['h_k'] = stars['h_m'] - stars['k_m'] + stars['j_k'] = stars['j_m'] - stars['k_m'] + + # Find Teff of each star from the color + stars['Teff'] = [teffMod[np.argmin((row['j_h'] - jhMod) ** 2 + (row['h_k'] - hkMod) ** 2)] for row in stars] + + # Calculate relative flux + stars['fluxscale'] = 10.0 ** (-0.4 * (stars['j_m'] - stars['j_m'][0])) + + # Star names + stars['name'] = [str(i) for i in stars['designation']] + + # Catalog name + cat = 'II/246/out' + + # Find distance from target to each star + sindRA = (stars['ra'][0] - stars['ra']) * np.cos(stars['dec'][0]) + cosdRA = stars['dec'][0] - stars['dec'] + stars.add_column(np.sqrt(sindRA ** 2 + cosdRA ** 2) * u.deg.to(u.arcsec), name='distance') + stars.sort('distance') - # Fleshing out index 0 of the simulation cube (trace of target) - if (sci_dx == 0) & (sci_dy == 0): # this is the target - - tr = pad_trace[my0:my1, mx0:mx1] * fluxscale - trX, trY = np.shape(tr)[1], np.shape(tr)[0] - - simuCube[0, 0:trY, 0:trX] = tr - - # Fleshing out indexes 1-361 of the simulation cube - # (trace of neighboring stars at every position angle) + # Add detector location to the table + stars.add_columns(np.zeros((10, len(stars))), names=['xtel', 'ytel', 'xdet', 'ydet', 'xsci', 'ysci', 'xord0', 'yord0', 'xord1', 'yord1']) + + # Add URL + urls = ['https://vizier.u-strasbg.fr/viz-bin/VizieR-5?-ref=VIZ62fa613b20f3fc&-out.add=.&-source={}&-c={}%20%2b{},eq=ICRS,rs=2&-out.orig=o'.format(cat, ra_deg, dec_deg) for ra_deg, dec_deg in zip(stars['ra'], stars['dec'])] + stars.add_column(urls, name='url') + + # Get traces + traces = trace_dict(teffs=np.unique(stars['Teff'])) + stars.add_column([np.zeros((256, 2048))] * len(stars), name='trace_o1') + stars.add_column([np.zeros((256, 2048))] * len(stars), name='trace_o2') + stars.add_column([np.zeros((256, 2048))] * len(stars), name='trace_o3') + for star in stars: + star['trace_o1'], star['trace_o2'], star['trace_o3'] = traces[int(star['Teff'])] + + return stars + 
+ +def add_star(startable, name, ra, dec, teff, fluxscale=None, delta_mag=None, dist=None, pa=None): + """ + Add a star to the star table + + Parameters + ---------- + startable: astropy.table.Table + The table of stars to add to + name: str + An identifier for the star + ra: float + The RA in decimal degrees + dec: float + The Dec in decimal degrees + teff: float + The effective temperature of the star + fluxscale: float + The star's flux relative to the target flux + delta_mag: float + The star's magnitude relative to the target magnitude + dist: float + The distance of the new star from the given RA and Dec in arcseconds + pa: float + The position angle of the new star relative to the given RA and Dec in degrees + + Returns + ------- + astropy.table.Table + The updated table of stars + """ + # Default + fluxscale = fluxscale or 1 + + # Convert mag to flux if necessary + if delta_mag is not None: + fluxscale = 10.0 ** (-0.4 * delta_mag) + + # Apply offset and position angle + if dist is not None and pa is not None: + coord = SkyCoord(ra=ra * u.deg, dec=dec * u.deg, frame='icrs') + newcoord = coord.directional_offset_by(pa * u.deg, dist * u.arcsec) + ra = newcoord.ra.degree + dec = newcoord.dec.degree + + # Get the trace + trace = get_trace('NIS_SUBSTRIP256', teff, verbose=False) + + # Add the row to the table + startable.add_row({'name': name, 'ra': ra, 'dec': dec, 'Teff': teff, 'fluxscale': fluxscale, 'distance': dist, 'trace_o1': trace[0], 'trace_o2': trace[1], 'trace_o3': trace[2]}) + startable.sort('distance') + + return startable + + +def calc_v3pa(V3PA, stars, aperture, data=None, c0x0=885, c0y0=1462, c1x0=-0.11, c1y0=0, c1y1=0, c1x1=0.02, tilt=0, ord0scale=1, ord1scale=1, plot=False, verbose=False): + """ + Calculate the V3 position angle for each target at the given PA + + Parameters + ---------- + V3PA: float + The PA in V3 + stars: astropy.table.Table + The table of stars in the target vicinity + aperture: pysiaf.aperture.JwstAperture, str + The 
aperture object for the given mode + ref: str + The reference frame to plot in, ['tel', 'det', 'sci'] + floor: float + The noise floor to zero out + plot: bool + Plot the full frame and subarray bounds with all traces + verbose: bool + Print statements + + Returns + ------- + targframe, starframe + The frame containing the target trace and a frame containing all contaminating star traces + """ + if verbose: + print("Checking PA={} with {} stars in the vicinity".format(V3PA, len(stars['ra']))) + + if isinstance(aperture, str): + + if verbose: + print("Getting aperture info from pysiaf...") + + # Aperture names + if aperture not in APERTURES: + raise ValueError("Aperture '{}' not supported. Try {}".format(aperture, list(APERTURES.keys()))) + + # Instantiate a pySIAF object + inst = APERTURES[aperture] + siaf = pysiaf.Siaf(inst['inst']) + + # Get the full and subarray apertures + full = siaf.apertures[inst['full']] + aperture = siaf.apertures[aperture] + + # Full frame pixel positions + rows, cols = full.corners('det') + aperture.minrow, aperture.maxrow = rows.min(), rows.max() + aperture.mincol, aperture.maxcol = cols.min(), cols.max() + + # Get APA from V3PA + APA = V3PA + aperture.V3IdlYAngle + if APA > 360: + APA = APA - 360 + elif APA < 0: + APA = APA + 360 + + # Aperture info + aper = APERTURES[aperture.AperName] + subY, subX = aper['subarr_y'][2] - aper['subarr_y'][1], aper['subarr_x'][1] - aper['subarr_x'][0] + + # Calculate corresponding V2/V3 (TEL) coordinates for Sweetspot + stars['xdet'][0], stars['ydet'][0] = aperture.reference_point('det') + stars['xtel'][0], stars['ytel'][0] = aperture.det_to_tel(stars['xdet'][0], stars['ydet'][0]) + stars['xsci'][0], stars['ysci'][0] = aperture.det_to_sci(stars['xdet'][0], stars['ydet'][0]) + + # Order 0 location relative to pysiaf SCI coordinates + x_sweet = 2865 + y_sweet = 1720 + stars['xord0'][0] = int(stars['xsci'][0] + c0x0 + c1x0 * (stars['ysci'][0] + c0y0 - y_sweet)) + stars['yord0'][0] = int(stars['ysci'][0] + 
c0y0) + stars['xord1'][0] = stars['xord0'][0] - x_sweet + aper['subarr_x'][0] + stars['yord1'][0] = stars['yord0'][0] - y_sweet + aper['subarr_y'][1] + + # Get target's attitude matrix for each Position Angle + attitude = pysiaf.utils.rotations.attitude_matrix(stars['xtel'][0], stars['ytel'][0], stars['ra'][0], stars['dec'][0], APA) + + # Get relative coordinates of the stars based on target attitude + if verbose: + print("Getting star locations for {} stars at PA={} from pysiaf...".format(len(stars), APA)) + + for idx, star in enumerate(stars[1:]): + + # Get the TEL coordinates (V2, V3) of the star + V2, V3 = pysiaf.utils.rotations.sky_to_tel(attitude, star['ra'], star['dec']) + star['xtel'], star['ytel'] = V2.to(u.arcsec).value, V3.to(u.arcsec).value + + # Get the DET coordinates of the star + star['xdet'], star['ydet'] = aperture.tel_to_det(star['xtel'], star['ytel']) + + # Get the DET coordinates of the star + star['xsci'], star['ysci'] = aperture.det_to_sci(star['xdet'], star['ydet']) + + # Order 0 location relative to pysiaf SCI coordinates + star['xord0'] = int(star['xsci'] + c0x0 + c1x0 * (star['ysci'] + c0y0 - y_sweet)) + star['yord0'] = int(star['ysci'] + c0y0) + + # Order 1/2/3 location relative to order 0 location + x_shift = int(c1x0 + c1x1 * (stars[0]['xord0'] - star['xord0'])) + y_shift = int(c1y0 + c1y1 * (stars[0]['yord0'] - star['yord0']) - c1x1 * (stars[0]['xord0'] - star['xord0'])) + star['xord1'] = star['xord0'] - x_sweet + aper['subarr_x'][0] + x_shift + star['yord1'] = star['yord0'] - y_sweet + aper['subarr_y'][1] + y_shift + + # Just stars in FOV (Should always have at least 1, the target) + lft, rgt, top, bot = 700, 5100, 1940, 1400 + FOVstars = stars[(lft < stars['xord0']) & (stars['xord0'] < rgt) & (bot < stars['yord0']) & (stars['yord0'] < top)] + + if verbose: + print("Calculating contamination from {} other stars in the FOV".format(len(FOVstars) - 1)) + + # Make frame for the target and a frame for all the other stars + targframe_o1 = 
np.zeros((subY, subX)) + targframe_o2 = np.zeros((subY, subX)) + targframe_o3 = np.zeros((subY, subX)) + starframe = np.zeros((subY, subX)) + + if plot: + # Set up hover tool + tips = [('Name', '@name'), ('RA', '@ra'), ('DEC', '@dec'), ('scale', '@fluxscale'), ('Teff', '@Teff'), ('ord0', '@xord0{int}, @yord0{int}')] + hover = HoverTool(tooltips=tips, name='stars') + crosshair = CrosshairTool(dimensions="height") + taptool = TapTool(behavior='select', callback=OpenURL(url="@url")) + + # Make the plot + tools = ['pan', crosshair, 'reset', 'box_zoom', 'wheel_zoom', 'save', taptool, hover] + fig = figure(title='Generated FOV from Gaia EDR3', width=900, height=subY, match_aspect=True, tools=tools) + fig.title = '({}, {}) at PA={} in {}'.format(stars[0]['ra'], stars[0]['dec'], V3PA, aperture.AperName) + + # Plot config + scale = 'log' + color_map = 'Viridis256' + + # Plot the obs data if possible + if data is not None: + vmax = np.nanmax(data) + if scale == 'log': + mapper = LogColorMapper(palette=color_map, low=1, high=vmax) else: + mapper = LinearColorMapper(palette=color_map, low=0, high=vmax) + data[data < 0] = 0 + data = rotate(data, tilt) + fig.image([data], x=0, y=2048 - data.shape[0], dh=data.shape[0], dw=2048, color_mapper=mapper) + + # Get order 0 + order0 = get_order0(aperture.AperName) * 1.5e8 # Scaling factor based on observations + + # SOSS trace masks + mask1, mask2, mask3 = SOSS_trace_mask(aperture.AperName) + + # Iterate over all stars in the FOV and add their scaled traces to the correct frame + for idx, star in enumerate(FOVstars): + + # Scale the order 0 image and get dims + scale0 = copy(order0) * star['fluxscale'] * ord0scale + dim0y, dim0x = scale0.shape + dim0y0 = int(dim0y / 2) + dim0y1 = dim0y - dim0y0 + dim0x0 = int(dim0x / 2) + dim0x1 = dim0x - dim0x0 + + # Locations of the order 0 pixels on the subarray + f0x0, f1x0 = int(max(aper['subarr_x'][0], star['xord0'] - dim0x0)), int(min(aper['subarr_x'][1], star['xord0'] + dim0x1)) + f0y0, f1y0 = 
int(max(aper['subarr_y'][1], star['yord0'] - dim0y0)), int(min(aper['subarr_y'][2], star['yord0'] + dim0y1)) + + if 0 < f1x0 - f0x0 <= dim0x and 0 < f1y0 - f0y0 <= dim0y: + + # How many pixels of the order 0 image fall on the subarray + t0x0 = dim0x - (f1x0 - f0x0) if f0x0 == aper['subarr_x'][0] else 0 + t1x0 = f1x0 - f0x0 if f1x0 == aper['subarr_x'][1] else dim0x + t0y0 = dim0y - (f1y0 - f0y0) if f0y0 == aper['subarr_y'][0] else 0 + t1y0 = f1y0 - f0y0 if f1y0 == aper['subarr_y'][2] else dim0y + + # if verbose: + # print("{} x {} pixels of star {} order 0 fall on {}".format(t1y0 - t0y0, t1x0 - t0x0, idx, aperture.AperName)) + + # Target order 0 is never on the subarray so add all order 0s to the starframe + starframe[f0y0 - aper['subarr_y'][1]:f1y0 - aper['subarr_y'][1], f0x0 - aper['subarr_x'][1]:f1x0 - aper['subarr_x'][0]] += scale0[t0y0:t1y0, t0x0:t1x0] + + # Higher Orders ============================================================================ + + # Get the appropriate trace + trace_o1 = star['trace_o1'] + trace_o2 = star['trace_o2'] + trace_o3 = star['trace_o3'] + + # Orient trace if need be + if 'NIS' in aperture.AperName: + trace_o1 = np.rot90(trace_o1.T[:, ::-1] * 1.5, k=1) # Scaling factor based on observations + trace_o2 = np.rot90(trace_o2.T[:, ::-1] * 1.5, k=1) # Scaling factor based on observations + trace_o3 = np.rot90(trace_o3.T[:, ::-1] * 1.5, k=1) # Scaling factor based on observations + + # Pad or trim SUBSTRIP256 simulation for SUBSTRIP96 or FULL frame + if aperture.AperName == 'NIS_SOSSFULL': + trace_o1 = np.pad(trace_o1, ((1792, 0), (0, 0)), 'constant') + trace_o2 = np.pad(trace_o2, ((1792, 0), (0, 0)), 'constant') + trace_o3 = np.pad(trace_o3, ((1792, 0), (0, 0)), 'constant') + elif aperture.AperName == 'NIS_SUBSTRIP96': + trace_o1 = trace_o1[:96, :] + trace_o2 = trace_o2[:96, :] + trace_o3 = trace_o3[:96, :] + + # Get the trace and shift into the correct subarray position + # trace *= mask1 + mask2 + mask3 + + # Scale the order 1, 2, 3 
image and get dims + trace_o1 *= star['fluxscale'] * ord1scale + if aperture.AperName.startswith('NIS'): + trace_o2 *= star['fluxscale'] * ord1scale + trace_o3 *= star['fluxscale'] * ord1scale + dimy, dimx = trace_o1.shape + + # Func to remove background + # def bkg(trc, cutoff=0.00001): + # trc[trc < 0] = cutoff + # trc[np.isnan(trc)] = cutoff + # trc[np.isinf(trc)] = cutoff + # return trc + + # Trim background + # trace_o1 = bkg(trace_o1) + # if aperture.AperName.startswith('NIS'): + # trace_o2 = bkg(trace_o2) + # trace_o3 = bkg(trace_o3) + + # Location of full trace footprint + fpx0 = int(star['xord1']) + fpx1 = int(fpx0 + dimx) + fpy0 = int(star['yord1']) + fpy1 = int(fpy0 + dimy) + + # Locations of the trace pixels on the subarray + f0x, f1x = max(aper['subarr_x'][0], fpx0), min(aper['subarr_x'][1], fpx1) + f0y, f1y = max(aper['subarr_y'][1], fpy0), min(aper['subarr_y'][2], fpy1) + + # print(idx, f0x, f1x, f0y, f1y) + if 0 < f1x - f0x <= dimx and 0 < f1y - f0y <= dimy: + + # How many pixels of the trace image fall on the subarray + t0x = dimx - (f1x - f0x) if f0x == aper['subarr_x'][0] else 0 + t1x = f1x - f0x if f1x == aper['subarr_x'][1] else dimx + t0y = dimy - (f1y - f0y) if f0y == aper['subarr_y'][0] else 0 + t1y = f1y - f0y if f1y == aper['subarr_y'][2] else dimy + + # if verbose: + # print("{} x {} pixels of star {} trace fall on {}".format(t1y - t0y, t1x - t0x, idx, aperture.AperName)) + + # Box to show footprint of full trace + # fig.patch([fpx0, fpx1, fpx1, fpx0], [fpy0, fpy0, fpy1, fpy1], line_color="black", fill_color='black', fill_alpha=0.1) + + # Add each order to it's own frame + if idx == 0: + + targframe_o1[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o1[t0y:t1y, t0x:t1x] + if aperture.AperName.startswith('NIS'): + targframe_o2[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o2[t0y:t1y, t0x:t1x] + 
targframe_o3[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o3[t0y:t1y, t0x:t1x] + + # Add all orders to the same frame + else: + + starframe[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o1[t0y:t1y, t0x:t1x] + if aperture.AperName.startswith('NIS'): + starframe[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o2[t0y:t1y, t0x:t1x] + starframe[f0y - aper['subarr_y'][1]:f1y - aper['subarr_y'][1], f0x - aper['subarr_x'][1]:f1x - aper['subarr_x'][0]] += trace_o3[t0y:t1y, t0x:t1x] - tr = pad_trace[my0:my1, mx0:mx1] * fluxscale - trX, trY = np.shape(tr)[1], np.shape(tr)[0] - simuCube[V3PA + 1, 0:trY, 0:trX] += tr + # Contam per order + simframe_o1 = targframe_o1 + starframe + simframe_o2 = targframe_o2 + starframe + simframe_o3 = targframe_o3 + starframe + simframe = targframe_o1 + targframe_o2 + targframe_o3 + starframe - return simuCube + # Calculate contam/total counts in each detector column + pctframe_o1 = starframe / simframe_o1 + pctframe_o2 = starframe / simframe_o2 + pctframe_o3 = starframe / simframe_o3 + pctline_o1 = np.nanmean(pctframe_o1 * mask1, axis=0) + pctline_o2 = np.nanmean(pctframe_o2 * mask2, axis=0) + pctline_o3 = np.nanmean(pctframe_o3 * mask3, axis=0) + result = {'pa': V3PA, 'target': targframe_o1 + targframe_o2 + targframe_o3, 'target_o1': targframe_o1, 'target_o2': targframe_o2, 'target_o3': targframe_o3, 'contaminants': starframe, 'sources': FOVstars, 'order1_contam': pctline_o1, 'order2_contam': pctline_o2, 'order3_contam': pctline_o3} + + if plot: + + # Plot the simulated frame + vmax = np.nanmax(simframe) + if scale == 'log': + mapper = LogColorMapper(palette=color_map, low=1, high=vmax) + else: + mapper = LinearColorMapper(palette=color_map, low=0, high=vmax) + + # Only plot the simulation if no data is available to plot + if data is None: + 
fig.image(image=[simframe], x=aper['subarr_x'][0], dw=subX, y=aper['subarr_y'][1], dh=subY, color_mapper=mapper) + + mapper = linear_cmap(field_name='Teff', palette=Spectral6, low=np.nanmin(FOVstars['Teff']), high=np.nanmax(FOVstars['Teff'])) + + # Plot order 0 locations + fig.circle('xord0', 'yord0', color=mapper, size=15, line_width=3, fill_color=None, name='stars', source=dict(FOVstars[['Teff', 'xord0', 'yord0', 'ra', 'dec', 'name', 'url', 'fluxscale', 'xdet', 'ydet', 'xtel', 'ytel']])) + fig.circle([x_sweet], [y_sweet], size=10, line_width=3, fill_color=None, line_color='black') + + fig = plot_traces(FOVstars, fig) + + # Show the figure + fig.x_range = Range1d(aper['subarr_x'][0], aper['subarr_x'][1]) + fig.y_range = Range1d(aper['subarr_y'][1], aper['subarr_y'][2]) + + # Source for ratio plot + rsource = ColumnDataSource(data=dict(x=np.arange(subX), zeros=np.zeros(subX), o1=pctline_o1, o2=pctline_o2, o3=pctline_o3)) + + # Make plot + rfig = figure(title='Target Contamination', width=900, height=200, match_aspect=True, tools=tools, x_range=fig.x_range) + rfig.line(np.arange(subX), pctline_o1, color='blue', legend_label='Order 1') + glyph1 = VArea(x='x', y1='zeros', y2='o1', fill_color="blue", fill_alpha=0.3) + rfig.add_glyph(rsource, glyph1) + if aperture.AperName not in ['NIS_SUBSTRIP96']: + rfig.line(np.arange(subX), pctline_o2, color='red', legend_label='Order 2') + glyph2 = VArea(x='x', y1='zeros', y2='o2', fill_color="red", fill_alpha=0.3) + rfig.add_glyph(rsource, glyph2) + rfig.line(np.arange(subX), pctline_o3, color='green', legend_label='Order 3') + glyph3 = VArea(x='x', y1='zeros', y2='o3', fill_color="green", fill_alpha=0.3) + rfig.add_glyph(rsource, glyph3) + rfig.y_range = Range1d(0, min(1, max(pctline_o1.max(), pctline_o2.max(), pctline_o3.max()))) + rfig.yaxis.axis_label = 'Contam / Total Counts' + rfig.xaxis.axis_label = 'Detector Column' + + # Color bar + # color_bar = ColorBar(color_mapper=mapper['transform'], width=10, location=(0, 0), 
title="Teff") + # fig.add_layout(color_bar, 'right') + + # Plot grid + gp = gridplot([[fig], [rfig]]) + + return result, gp + + return result + + +def plot_traces(star_table, fig, color='red'): + """ + PLot the trace locations of all the stars in the table + + Parameters + ---------- + star_table: astropy.table.Table + The table of stars + fig: bokeh.plotting.figure.Figure + The figure to plot on + + Returns + ------- + fig + The figure + """ + + # Trace extends in dispersion direction further than 2048 subarray edges + blue_ext = 150 + red_ext = 200 + + # Get the new x-ranges + xr0 = np.linspace(-blue_ext, 2048 + red_ext, 1000) + xr1 = np.linspace(-blue_ext, 1820 + red_ext, 1000) + xr2 = np.linspace(-blue_ext, 1130 + red_ext, 1000) + + # Add the y-intercept to the c0 coefficient + polys = trace_polynomial() + yr0 = np.polyval(polys[0], xr0) + yr1 = np.polyval(polys[1], xr1) + yr2 = np.polyval(polys[2], xr2) + + for idx, star in enumerate(star_table): + # Order 1/2/3 location relative to order 0 + fig.line(xr0 + star['xord1'], yr0 + star['yord1'], color=color, line_dash='solid' if idx == 0 else 'dashed') + fig.line(xr1 + star['xord1'], yr1 + star['yord1'], color=color, line_dash='solid' if idx == 0 else 'dashed') + fig.line(xr2 + star['xord1'], yr2 + star['yord1'], color=color, line_dash='solid' if idx == 0 else 'dashed') + + return fig + + +def field_simulation(ra, dec, aperture, binComp=None, n_jobs=-1, plot=False, multi=True, verbose=True): + """Produce a contamination field simulation at the given sky coordinates -def lrsFieldSim(ra, dec, binComp=''): - """ Produce a Grism Time Series field simulation for a target. Parameters ---------- ra : float - The RA of the target. + The RA of the target dec : float - The Dec of the target. - binComp : sequence - The parameters of a binary companion. 
+ The Dec of the target + aperture: str + The aperture to use, ['NIS_SUBSTRIP96', 'NIS_SUBSTRIP256', 'NRCA5_GRISM256_F444W', 'NRCA5_GRISM256_F322W2', 'MIRI_SLITLESSPRISM'] + binComp : dict + A dictionary of parameters for a binary companion with keys {'name', 'ra', 'dec', 'fluxscale', 'teff'} + n_jobs: int + Number of cores to use (-1 = All) Returns ------- simuCube : np.ndarray - The simulated data cube. Index 0 and 1 (axis=0) show the trace - of the target for orders 1 and 2 (respectively). Index 2-362 - show the trace of the target at every position angle (PA) of the - instrument. + The simulated data cube. Index 0 and 1 (axis=0) show the trace of + the target for orders 1 and 2 (respectively). Index 2-362 show the trace + of the target at every position angle (PA) of the instrument. + plt: NoneType, bokeh.plotting.figure + The plot of the contaminationas a function of PA + + Example + ------- + from exoctk.contam_visibility import field_simulator as fs + ra, dec = 91.872242, -25.594934 + targframe, starcube, results = fs.field_simulation(ra, dec, 'NIS_SUBSTRIP256') """ - # INSTRUMENT PARAMETERS + # Check for contam tool data + check_for_data('exoctk_contam') + + # Aperture names + if aperture not in APERTURES: + raise ValueError("Aperture '{}' not supported. 
Try {}".format(aperture, list(APERTURES.keys()))) + # Instantiate a pySIAF object - siaf = pysiaf.Siaf('MIRI') - aper = siaf.apertures['MIRIM_SLITLESSPRISM'] - full = siaf.apertures['MIRIM_FULL'] + if verbose: + print('Getting info from pysiaf for {} aperture...'.format(aperture)) + + targetcrd = crd.SkyCoord(ra=ra, dec=dec, unit=u.deg) + inst = APERTURES[aperture] + siaf = pysiaf.Siaf(inst['inst']) - # Calling the variables - deg2rad = np.pi / 180 + # Get the full and subarray apertures + full = siaf.apertures[inst['full']] + aper = siaf.apertures[aperture] subX, subY = aper.XSciSize, aper.YSciSize - rad = 2.0 # arcmins - V3PAs = np.arange(0, 360, 1) - nPA = len(V3PAs) - # Generate cube of field simulation at every degree of APA rotation - simuCube = np.zeros([nPA + 1, subY, subX]) - xSweet, ySweet = aper.reference_point('det') - add_to_v3pa = aper.V3IdlYAngle - # MIRI Full Frame dimensions + + # Full frame pixel positions rows, cols = full.corners('det') - minrow, maxrow = rows.min(), rows.max() - mincol, maxcol = cols.min(), cols.max() - - # STEP 1 - # Converting to degrees - targetcrd = crd.SkyCoord(ra=ra, dec=dec, unit=(u.hour, u.deg)) - targetRA = targetcrd.ra.value - targetDEC = targetcrd.dec.value - - # Querying for neighbors with 2MASS IRSA's fp_psc (point-source catalog) - info = Irsa.query_region(targetcrd, catalog='fp_psc', spatial='Cone', - radius=rad * u.arcmin) - - # Coordinates of all the stars in FOV, including target - allRA = info['ra'].data.data - allDEC = info['dec'].data.data - - # Initiating a dictionary to hold all relevant star information - stars = {} - stars['RA'], stars['DEC'] = allRA, allDEC - - # STEP 2 - sindRA = (targetRA - stars['RA']) * np.cos(targetDEC) - cosdRA = targetDEC - stars['DEC'] - distance = np.sqrt(sindRA ** 2 + cosdRA ** 2) - if np.min(distance) > 1.0 * (10 ** -4): - coords = crd.SkyCoord(ra=ra, dec=dec, unit=(u.hour, u.deg)).to_string('decimal') - ra, dec = coords.split(' ')[0], coords.split(' ')[1] - raise 
Exception('Unable to detect a source with coordinates [RA: {}, DEC: {}] within IRSA`s 2MASS Point-Source Catalog. Please enter different coordinates or contact the JWST help desk.'.format(str(ra), str(dec))) - - targetIndex = np.argmin(distance) - - # Restoring model parameters - modelParam = readsav(os.path.join(TRACES_PATH, 'NIRISS', 'modelsInfo.sav'), - verbose=False) - jhMod = modelParam['jhmod'] - hkMod = modelParam['hkmod'] - teffMod = modelParam['teffmod'] - - # STEP 3 - # JHK bands of all stars in FOV, including target - Jmag = info['j_m'].data.data - Hmag = info['h_m'].data.data - Kmag = info['k_m'].data.data - # J-H band, H-K band. This will be used to derive the Teff - J_Hobs = Jmag - Hmag - H_Kobs = Hmag - Kmag - - # Add any missing companion - if binComp != '': - bb = binComp[0] / 3600 / np.cos(allDEC[targetIndex] * deg2rad) - allRA = np.append(allRA, (allRA[targetIndex] + bb)) - allDEC = np.append(allDEC, (allDEC[targetIndex] + binComp[1] / 3600)) - Jmag = np.append(Jmag, binComp[2]) - Hmag = np.append(Kmag, binComp[3]) - Kmag = np.append(Kmag, binComp[4]) - J_Hobs = Jmag - Hmag - H_Kobs = Hmag - Kmag - - # Number of stars - nStars = stars['RA'].size - - # Find/assign Teff of each star - starsT = np.empty(nStars) - for j in range(nStars): - color_separation = (J_Hobs[j] - jhMod)**2 + (H_Kobs[j] - hkMod)**2 - min_separation_ind = np.argmin(color_separation) - starsT[j] = teffMod[min_separation_ind] - - # Record keeping - stars['Temp'] = starsT - stars['Jmag'] = Jmag - - # STEP 4 - # Calculate corresponding V2/V3 (TEL) coordinates for Sweetspot - v2targ, v3targ = aper.det_to_tel(xSweet, ySweet) - - for V3PA in range(0, nPA, 1): - # Get APA from V3PA - APA = V3PA + add_to_v3pa - if APA > 360: - APA = APA - 360 - elif APA < 0: - APA = APA + 360 - - print('Generating field at APA : {}'.format(str(APA))) - - # Get target's attitude matrix for each Position Angle - attitude = rotations.attitude_matrix(v2targ, v3targ, - targetRA, targetDEC, - APA) - - xdet, 
ydet = [], [] - xsci, ysci = [], [] - for starRA, starDEC in zip(stars['RA'], stars['DEC']): - # Get the TEL coordinates of each star w attitude matrix - V2, V3 = rotations.sky_to_tel(attitude, starRA, starDEC) - # Convert to arcsec and turn to a float - V2, V3 = V2.to(u.arcsec).value, V3.to(u.arcsec).value - - XDET, YDET = aper.tel_to_det(V2, V3) - XSCI, YSCI = aper.det_to_sci(XDET, YDET) - - xdet.append(XDET) - ydet.append(YDET) - xsci.append(XSCI) - ysci.append(YSCI) - - # Record keeping - stars['xdet'], stars['ydet'] = np.array(xdet), np.array(ydet) - stars['xsci'], stars['ysci'] = np.array(xsci), np.array(ysci) - - sci_targx, sci_targy = stars['xsci'][targetIndex],\ - stars['ysci'][targetIndex] - # STEP 5 - inFOV = [] - for star in range(0, nStars): - - x, y = stars['xdet'][star], stars['ydet'][star] - if (mincol < x) & (x < maxcol) & (minrow < y) & (y < maxrow): - inFOV.append(star) - - inFOV = np.array(inFOV) - - # STEP 6 - fitsFiles = glob.glob(os.path.join(TRACES_PATH, 'MIRI', 'LOW*.fits')) - fitsFiles = np.sort(fitsFiles) - - for idx in inFOV: - - sci_dx = round(sci_targx - stars['xsci'][idx]) - sci_dy = round(sci_targy - stars['ysci'][idx]) - - temp = stars['Temp'][idx] - - for file in fitsFiles: - if str(temp) in file: - trace = fits.getdata(file)[0] - - fluxscale = 10.0**(-0.4 * (stars['Jmag'][idx] - stars['Jmag'][targetIndex])) - - # Padding array - pad_trace = np.pad(trace, pad_width=5000, mode='constant', - constant_values=0) - - # Determine the highest pixel value of trace - maxY, maxX = np.where(pad_trace == pad_trace.max()) - peakY, peakX = maxY[0], maxX[0] - - # Use relative distances (sci_dx, sci_dy) to find target - # xTarg,yTarg are essentially the "sweetspot" in the PADDED arr - xTarg = peakX + sci_dx - yTarg = peakY + sci_dy - - # Use the (xTarg, yTarg) coordinates to slice out subarray - # remember X is columns, Y is rows - dimX0, dimX1 = xTarg - sci_targx, xTarg + subX - sci_targx - dimY0, dimY1 = yTarg - sci_targy, yTarg + subY - 
sci_targy - - if dimX0 < 0: - dimX0 = 0 - dimX1 = subX - if dimY0 < 0: - dimY0 = 0 - dimY1 = subY - - traceX, traceY = np.shape(pad_trace)[1], np.shape(pad_trace)[0] - if dimX1 > traceX: - dimX1 = traceX - dimX0 = traceX - subX - if dimY1 > traceY: - dimY1 = traceY - dimY0 = traceY - subY - - if (dimX1 < 0) or (dimY1 < 0): - continue + aper.minrow, aper.maxrow = rows.min(), rows.max() + aper.minrow, aper.maxrow = rows.min(), rows.max() + aper.mincol, aper.maxcol = cols.min(), cols.max() + + # Find stars in the vicinity + stars = find_stars(ra, dec, verbose=verbose) + + # Add stars manually + if isinstance(binComp, dict): + stars = add_star(stars, **binComp) + + # Set the number of cores for multiprocessing + max_cores = 8 + if n_jobs == -1 or n_jobs > max_cores: + n_jobs = max_cores + + # Get full list from ephemeris + ra_hms, dec_dms = re.sub('[a-z]', ':', targetcrd.to_string('hmsdms')).split(' ') + goodPAs = get_exoplanet_positions(ra_hms, dec_dms, in_FOR=True) + + # Get all observable PAs and convert to ints + goodPA_vals = list(goodPAs[~goodPAs['{}_min_pa_angle'.format(inst['inst'].upper())].isna()]['{}_min_pa_angle'.format(inst['inst'].upper())]) + list(goodPAs[~goodPAs['{}_nominal_angle'.format(inst['inst'].upper())].isna()]['{}_nominal_angle'.format(inst['inst'].upper())]) + list(goodPAs[~goodPAs['{}_max_pa_angle'.format(inst['inst'].upper())].isna()]['{}_max_pa_angle'.format(inst['inst'].upper())]) + goodPA_ints = np.sort(np.unique(np.array(goodPA_vals).astype(int))) + + # Group good PAs to find gaps in visibility + good_groups = [] + current_group = [goodPA_ints[0]] + max_gap = 7 # Biggest PA gap considered to still be observable + for i in range(1, len(goodPA_ints)): + if goodPA_ints[i] - current_group[-1] <= max_gap: + current_group.append(goodPA_ints[i]) + else: + good_groups.append(current_group) + current_group = [goodPA_ints[i]] + + good_groups.append(current_group) + good_group_bounds = [(min(grp), max(grp)) for grp in good_groups] + goodPA_list = 
np.concatenate([np.arange(grp[0], grp[1]+1) for grp in good_group_bounds]).ravel() + + # Flatten list and check against 360 angles to get all bad PAs + # badPA_list = [pa for pa in pa_list if pa not in goodPA_list] + + # Time it + if verbose: + print('Calculating target contamination from {} neighboring sources in position angle ranges {}...'.format(len(stars), good_group_bounds)) + start = time.time() + + # Calculate contamination of all stars at each PA + # ----------------------------------------------- + # To multiprocess, or not to multiprocess. That is the question. + # Whether 'tis nobler in the code to suffer + # The slings and arrows of outrageous list comprehensions, + # Or to take arms against a sea of troubles, + # And by multiprocessing end them? + if multi: + pl = pool.ThreadPool(n_jobs) + func = partial(calc_v3pa, stars=stars, aperture=aper, plot=False, verbose=False) + results = pl.map(func, goodPA_list) + pl.close() + pl.join() + + else: + results = [] + for pa in goodPA_list: + result = calc_v3pa(pa, stars=stars, aperture=aper, plot=False, verbose=False) + results.append(result) + + # We only need one target frame frames + targframe_o1 = np.asarray(results[0]['target_o1']) + targframe_o2 = np.asarray(results[0]['target_o2']) + targframe_o3 = np.asarray(results[0]['target_o3']) + targframe = [targframe_o1, targframe_o2, targframe_o3] + + # Make sure starcube is of shape (PA, rows, cols) + starcube = np.zeros((360, targframe_o1.shape[0], targframe_o1.shape[1])) + + # Make the contamination plot + for result in results: + starcube[result['pa'], :, :] = result['contaminants'] + + if verbose: + print('Contamination calculation complete: {} {}'.format(round(time.time() - start, 3), 's')) + + # Make contam plot + if plot: + contam_slider_plot(results, plot=plot) + + return targframe, starcube, results + + +def contam_slider_plot(contam_results, threshold=0.05, plot=False): + """ + Make the contamination plot with a slider - # -1 because pySIAF is 
1-indexed - mx0, mx1 = int(dimX0) - 1, int(dimX1) - 1 - my0, my1 = int(dimY0) - 1, int(dimY1) - 1 + Parameters + ---------- + contam_results: dict + The dictionary of results from the field_simulation function + plot: bool + Show the plot if True - # Fleshing out index 0 of the simulation cube (trace of target) - if (sci_dx == 0) & (sci_dy == 0): # this is the target + Returns + ------- + bokeh.layouts.column + The column of plots + """ + # Full PA list + pa_list = np.arange(360) + goodPA_list = [result['pa'] for result in contam_results] + badPA_list = [pa for pa in pa_list if pa not in goodPA_list] + + # Grab one target frame + targframe = np.asarray(contam_results[0]['target']) + + # Make the contamination plot + order1_contam = np.zeros((360, targframe.shape[1])) + order2_contam = np.zeros((360, targframe.shape[1])) + order3_contam = np.zeros((360, targframe.shape[1])) + for result in contam_results: + order1_contam[result['pa'], :] = result['order1_contam'] + order2_contam[result['pa'], :] = result['order2_contam'] + order3_contam[result['pa'], :] = result['order3_contam'] + + # Define data + contam_dict = {'contam1_{}'.format(result['pa']): result['order1_contam'] for result in contam_results} + contam_dict.update({'contam2_{}'.format(result['pa']): result['order2_contam'] for result in contam_results}) + contam_dict.update({'contam3_{}'.format(result['pa']): result['order3_contam'] for result in contam_results}) + + # Wrap the data in two ColumnDataSources + source_visible = ColumnDataSource( + data=dict(col=np.arange(2048), zeros=np.zeros(2048), contam1=order1_contam[0], contam2=order2_contam[0], + contam3=order3_contam[0])) + source_available = ColumnDataSource(data=contam_dict) + + # Define plot elements + plt = figure(width=900, height=300, tools=['reset', 'box_zoom', 'wheel_zoom', 'save']) + plt.line('col', 'contam1', source=source_visible, color='blue', line_width=2, line_alpha=0.6, + legend_label='Order 1') + plt.line('col', 'contam2', 
source=source_visible, color='red', line_width=2, line_alpha=0.6, legend_label='Order 2') + plt.line('col', 'contam3', source=source_visible, color='green', line_width=2, line_alpha=0.6, + legend_label='Order 3') + glyph1 = VArea(x="col", y1="zeros", y2="contam1", fill_color="blue", fill_alpha=0.3) + plt.add_glyph(source_visible, glyph1) + glyph2 = VArea(x="col", y1="zeros", y2="contam2", fill_color="red", fill_alpha=0.3) + plt.add_glyph(source_visible, glyph2) + glyph3 = VArea(x="col", y1="zeros", y2="contam3", fill_color="green", fill_alpha=0.3) + plt.add_glyph(source_visible, glyph3) + plt.y_range = Range1d(0, min(1, max(np.nanmax(order1_contam), np.nanmax(order2_contam), np.nanmax(order3_contam)))) + plt.x_range = Range1d(0, 2048) + plt.xaxis.axis_label = '' + plt.yaxis.axis_label = 'Contamination / Target Flux' + slider = Slider(title='Position Angle', + value=pa_list[0], + start=min(pa_list), + end=max(pa_list), + step=int((max(pa_list) - min(pa_list)) / (len(pa_list) - 1))) + + span = Span(line_width=2, location=slider.value, dimension='height') + + # Define CustomJS callback, which updates the plot based on selected function by updating the source_visible + callback = CustomJS( + args=dict(source_visible=source_visible, source_available=source_available, span=span), code=""" + var selected_pa = (cb_obj.value).toString(); + var data_visible = source_visible.data; + var data_available = source_available.data; + data_visible['contam1'] = data_available['contam1_' + selected_pa]; + data_visible['contam2'] = data_available['contam2_' + selected_pa]; + data_visible['contam3'] = data_available['contam3_' + selected_pa]; + span.location = cb_obj.value; + source_visible.change.emit(); + """) + + # Make a guide that shows which PAs are unobservable + viz_none = np.array([1 if i in badPA_list else 0 for i in pa_list]) + viz_ord1 = np.array([1 if i > threshold else 0 for i in np.nanmax(order1_contam, axis=1)]) + viz_ord2 = np.array([1 if i > threshold else 0 for i in 
np.nanmax(order2_contam, axis=1)]) + viz_ord3 = np.array([1 if i > threshold else 0 for i in np.nanmax(order3_contam, axis=1)]) + + # Make the plot + viz_plt = figure(width=900, height=200, x_range=Range1d(0, 359)) + viz_plt.step(np.arange(360), np.mean(order1_contam, axis=1), color='blue', mode="center") + viz_plt.step(np.arange(360), np.mean(order2_contam, axis=1), color='red', mode="center") + viz_plt.step(np.arange(360), np.mean(order3_contam, axis=1), color='green', mode="center") + c1 = viz_plt.vbar(x=np.arange(360), top=viz_ord1, line_color=None, fill_color='blue', alpha=0.2) + c2 = viz_plt.vbar(x=np.arange(360), top=viz_ord2, line_color=None, fill_color='red', alpha=0.2) + c3 = viz_plt.vbar(x=np.arange(360), top=viz_ord3, line_color=None, fill_color='green', alpha=0.2) + c0 = viz_plt.vbar(x=np.arange(360), top=viz_none, color='black', alpha=0.6) + viz_plt.x_range = Range1d(0, 359) + viz_plt.y_range = Range1d(0, 1) + viz_plt.add_layout(span) + + legend = Legend(items=[ + ('Ord 1 > {}% Contaminated'.format(threshold * 100), [c1]), + ('Ord 2 > {}% Contaminated'.format(threshold * 100), [c2]), + ('Ord 3 > {}% Contaminated'.format(threshold * 100), [c3]), + ("Target not visible", [c0]), + ], location=(50, 0), orientation='horizontal', border_line_alpha=0) + viz_plt.add_layout(legend, 'below') + + # Put plot together + slider.js_on_change('value', callback) + layout = column(plt, slider, viz_plt) + + if plot: + show(layout) + + return layout + + +def get_order0(aperture): + """Get the order 0 image for the given aperture - tr = pad_trace[my0:my1, mx0:mx1] * fluxscale - trX, trY = np.shape(tr)[1], np.shape(tr)[0] + Parameters + ---------- + aperture: str + The aperture to use - simuCube[0, 0:trY, 0:trX] = tr + Returns + ------- + np.ndarray + The 2D order 0 image + """ + # Get file + # TODO: Add order 0 files for other modes + if 'NIS' in aperture: + filename = 'NIS_order0.npy' - # Fleshing out indexes 1-361 of the simulation cube - # (trace of neighboring stars 
at every position angle) - else: + # Get the path to the trace files + trace_path = os.path.join(os.environ['EXOCTK_DATA'], 'exoctk_contam/order0/{}'.format(filename)) - tr = pad_trace[my0:my1, mx0:mx1] * fluxscale - trX, trY = np.shape(tr)[1], np.shape(tr)[0] - simuCube[V3PA + 1, 0:trY, 0:trX] += tr + # Make frame + trace = np.load(trace_path) - return simuCube + return trace -def sossFieldSim(ra, dec, binComp='', dimX=256): - """ Produce a SOSS field simulation for a target. +def get_trace(aperture, teff, verbose=False): + """Get the trace for the given aperture at the given temperature Parameters ---------- - ra: float - The RA of the target. - dec: float - The Dec of the target. - binComp: sequence - The parameters of a binary companion. - dimX: int - The subarray size. + aperture: str + The aperture to use + teff: int + The temperature [K] Returns ------- - simuCub : np.ndarray - The simulated data cube. + np.ndarray + The 2D trace """ + # Get the path to the trace files + traces_path = os.path.join(os.environ['EXOCTK_DATA'], 'exoctk_contam/traces/{}/*.fits'.format('NIS_SUBSTRIP256' if 'NIS' in aperture else aperture)) + + # Glob the file names + trace_files = glob.glob(traces_path) + + # Get closest Teff + teffs = np.array([int(os.path.basename(file).split('_')[-1][:-5]) for file in trace_files]) + file = trace_files[np.argmin((teffs - teff)**2)] + if verbose: + print('Fetching {} {}K trace from {}'.format(aperture, teff, file)) + + # Get data + if 'NIS' in aperture: + # Orders stored separately just in case ;) + traceo1 = fits.getdata(file, ext=0) + traceo2 = fits.getdata(file, ext=1) + traceo3 = fits.getdata(file, ext=2) + + # Zero out background + traceo1[traceo1 < 1] = 0 + traceo2[traceo2 < 1] = 0 + traceo3[traceo3 < 1] = 0 + + trace = [traceo1, traceo2, traceo3] + + else: + trace = [fits.getdata(file)] + + # # Expand to SUBSTRIP256 to FULL frame for NIS_SOSSFULL + # if aperture == 'NIS_SOSSFULL': + # full_trace = np.zeros((2301, 2301)) + # full_trace[:, 
-257:] = trace + # trace = full_trace + + return trace + + +def old_plot_contamination(targframe_o1, targframe_o2, targframe_o3, starcube, wlims, badPAs=[], title=''): + """ + Plot the contamination + + Parameters + ---------- + targframe: np.ndarray + The frame of target data + starcube: np.ndarray + The cube of star data at each PA + wlims: tuple + The wavelength min and max + badPAs: list + The list of position angles with no visibility + + Returns + ------- + bokeh.layouts.gridplot + The contamination figure + """ + # Data dimensions + PAs, rows, cols = starcube.shape + + for targframe in [targframe_o1, targframe_o2, targframe_o3]: + + + # Remove background values < 1 as it can blow up contamination + targframe = np.where(targframe < 1, 0, targframe) + + # The width of the target trace + peak = targframe.max() + low_lim_col = np.where(targframe > 0.0001 * peak)[1].min() + high_lim_col = np.where(targframe > 0.0001 * peak)[1].max() + + # Using the starcube of shape (PAs, rows, wave), make a frame of (wave, pa) + contam = np.zeros([rows, PAs]) + for row in np.arange(rows): + + # Get the + peakX = np.argmax(targframe[row, :]) + left = peakX - low_lim_col + right = peakX + high_lim_col + + # Calculate weights + tr = targframe[row, left:right] + wt = tr / np.sum(tr**2) + ww = np.tile(wt, PAs).reshape([PAs, tr.size]) + + # Add to contam figure + contam[row, :] = np.sum(starcube[:, row, left:right] * ww, axis=1, where=~np.isnan(starcube[:, row, left:right] * ww)) + + # Log plot contamination, clipping small values + contam = np.log10(np.clip(contam, 1.e-10, 1.)) + + # Hover tool + hover = HoverTool(tooltips=[("Wavelength", "$x"), ("PA", "$y"), ('Value', '@data')], name='contam') + tools = ['pan', 'box_zoom', 'crosshair', 'reset', hover] + trplot = figure(tools=tools, width=600, height=500, title=title, x_range=Range1d(*wlims), y_range=Range1d(0, PAs)) + + # Colors + color_mapper = LinearColorMapper(palette=PuBu[8][::-1][2:], low=-4, high=1) + color_mapper.low_color = 
'white' + color_mapper.high_color = 'black' + + # Make the trace plot + source = dict(data=[contam]) + trplot.image(source=source, image='data', x=wlims[0], y=0, dw=wlims[1] - wlims[0], dh=PAs, color_mapper=color_mapper, name='contam') + trplot.xaxis.axis_label = 'Wavelength (um)' + trplot.yaxis.axis_label = 'Aperture Position Angle (degrees)' + color_bar = ColorBar(color_mapper=color_mapper, orientation="horizontal", location=(0, 0)) + trplot.add_layout(color_bar, 'below') + + # Shade bad position angles on the trace plot + nbadPA = len(badPAs) + if nbadPA > 0: + tops = [np.max(badPA) for badPA in badPAs] + bottoms = [np.min(badPA) for badPA in badPAs] + left = [wlims[0]] * nbadPA + right = [wlims[1]] * nbadPA + trplot.quad(top=tops, bottom=bottoms, left=left, right=right, color='#555555', alpha=0.6) + + # # Make a figure summing the contamination at a given PA + # sumplot = figure(tools=tools, width=150, height=500, x_range=Range1d(0, 100), y_range=trplot.y_range, title=None) + # sumplot.line(100 * np.sum(contam >= 0.001, axis=1) / rows, np.arange(PAs) - 0.5, line_color='blue', legend_label='> 0.001') + # sumplot.line(100 * np.sum(contam >= 0.01, axis=1) / rows, np.arange(PAs) - 0.5, line_color='green', legend_label='> 0.01') + # sumplot.xaxis.axis_label = '% channels contam.' + # sumplot.yaxis.major_label_text_font_size = '0pt' - # STEP 1 - # Pulling stars from IRSA point-source catalog - targetcrd = crd.SkyCoord(ra=ra, dec=dec, unit=(u.hour, u.deg)) - targetRA = targetcrd.ra.value - targetDEC = targetcrd.dec.value - info = Irsa.query_region(targetcrd, - catalog='fp_psc', - spatial='Cone', - radius=2.5 * u.arcmin) - - # Coordinates of all stars in FOV, including target - allRA = info['ra'].data.data - allDEC = info['dec'].data.data - Jmag = info['j_m'].data.data - Hmag = info['h_m'].data.data - Kmag = info['k_m'].data.data - - # J-H band, H-K band. 
This will be used to derive the stellar Temps later - J_Hobs = Jmag - Hmag - H_Kobs = Hmag - Kmag - - # Determining target index by calculating the relative distance between - # each source and the target. The target will have the smallest distance - # from itself (oof) so whatever that index is will be the targetIndex - aa = ((targetRA - allRA) * np.cos(targetDEC)) - distance = np.sqrt(aa**2 + (targetDEC - allDEC)**2) - targetIndex = np.argmin(distance) - - # Add any missing companion - if binComp != '': - binComp = [float(i) for i in binComp.split(',')] - - deg2rad = np.pi / 180 - bb = binComp[0] / 3600 / np.cos(allDEC[targetIndex] * deg2rad) - allRA = np.append(allRA, (allRA[targetIndex] + bb)) - allDEC = np.append(allDEC, (allDEC[targetIndex] + binComp[1] / 3600)) - Jmag = np.append(Jmag, binComp[2]) - Hmag = np.append(Kmag, binComp[3]) - Kmag = np.append(Kmag, binComp[4]) - J_Hobs = Jmag - Hmag - H_Kobs = Hmag - Kmag - - # Number of stars - nStars = allRA.size - - # Restoring model parameters - modelParam = readsav(os.path.join(TRACES_PATH, 'NIRISS', 'modelsInfo.sav'), - verbose=False) - models = modelParam['models'] - modelPadX = modelParam['modelpadx'] - modelPadY = modelParam['modelpady'] - dimXmod = modelParam['dimxmod'] - dimYmod = modelParam['dimymod'] - jhMod = modelParam['jhmod'] - hkMod = modelParam['hkmod'] - teffMod = modelParam['teffmod'] - - # Find/assign Teff of each star - starsT = np.empty(nStars) - for j in range(nStars): - color_separation = (J_Hobs[j] - jhMod)**2 + (H_Kobs[j] - hkMod)**2 - min_separation_ind = np.argmin(color_separation) - starsT[j] = teffMod[min_separation_ind] - - sweetSpot = dict(x=856, y=107, RA=allRA[targetIndex], - DEC=allDEC[targetIndex], jmag=Jmag[targetIndex]) - - radeg = 180 / np.pi - niriss_pixel_scale = 0.065 # arcsec - # offset between all stars and target - dRA = (allRA - sweetSpot['RA']) * np.cos(sweetSpot['DEC'] / radeg) * 3600 - dDEC = (allDEC - sweetSpot['DEC']) * 3600 - - # Put field stars positions and 
magnitudes in structured array - _ = dict(RA=allRA, DEC=allDEC, dRA=dRA, dDEC=dDEC, jmag=Jmag, T=starsT, - x=np.empty(nStars), y=np.empty(nStars), dx=np.empty(nStars), - dy=np.empty(nStars)) - stars = np.empty(nStars, - dtype=[(key, val.dtype) for key, val in _.items()]) - for key, val in _.items(): - stars[key] = val - - # Initialize final fits cube that contains the modelled traces - # with contamination - PAmin = 0 # instrument PA, degrees - PAmax = 360 - dPA = 1 # degrees - - # Set of IPA values to cover - PAtab = np.arange(PAmin, PAmax, dPA) # degrees - nPA = len(PAtab) - - dimY = 2048 - # cube of trace simulation at every degree of field rotation, - # +target at O1 and O2 - simuCube = np.zeros([nPA + 2, dimY, dimX]) - - saveFiles = glob.glob( - os.path.join( - TRACES_PATH, - 'NIRISS', - '*modelOrder12*.sav')) - - # Big loop to generate a simulation at each instrument PA - - for kPA in range(PAtab.size): - APA = PAtab[kPA] - print('Generating field at APA : {}'.format(str(APA))) - - sindx = np.sin((np.pi / 2) + APA / radeg) * stars['dDEC'] - cosdx = np.cos((np.pi / 2) + APA / radeg) * stars['dDEC'] - nps = niriss_pixel_scale - stars['dx'] = (np.cos((np.pi / 2) + APA / radeg) * stars['dRA'] - sindx) / nps - stars['dy'] = (np.sin((np.pi / 2) + APA / radeg) * stars['dRA'] + cosdx) / nps - stars['x'] = stars['dx'] + sweetSpot['x'] - stars['y'] = stars['dy'] + sweetSpot['y'] - - # Retain stars that are within the Direct Image NIRISS POM FOV - ind, = np.where((stars['x'] >= -162) & (stars['x'] <= 2047 + 185) & (stars['y'] >= -154) & (stars['y'] <= 2047 + 174)) - starsInFOV = stars[ind] - - for i in range(len(ind)): - intx = round(starsInFOV['dx'][i]) - inty = round(starsInFOV['dy'][i]) - - k = np.where(teffMod == starsInFOV['T'][i])[0][0] - - fluxscale = 10.0**(-0.4 * (starsInFOV['jmag'][i] - sweetSpot['jmag'])) - - # deal with subection sizes. 
- # these variables will determine where the - # trace will land on the array based on the - # neighbor's position relative to the target's position - mx0 = int(modelPadX - intx) - mx1 = int(modelPadX - intx + dimX) - my0 = int(modelPadY - inty) - my1 = int(modelPadY - inty + dimY) - - if (mx0 > dimXmod) or (my0 > dimYmod): - continue - if (mx1 < 0) or (my1 < 0): - continue - - x0 = (mx0 < 0) * (-mx0) - y0 = (my0 < 0) * (-my0) - mx0 *= (mx0 >= 0) - mx1 = dimXmod if mx1 > dimXmod else mx1 - my0 *= (my0 >= 0) - my1 = dimYmod if my1 > dimYmod else my1 - - # if target and first kPA, add target traces of order 1 and 2 - # in output cube - if (intx == 0) & (inty == 0) & (kPA == 0): - fNameModO12 = saveFiles[k] - - modelO12 = readsav(fNameModO12, verbose=False)['modelo12'] - ord1 = modelO12[0, my0:my1, mx0:mx1] * fluxscale - ord2 = modelO12[1, my0:my1, mx0:mx1] * fluxscale - simuCube[0, y0:y0 + my1 - my0, x0:x0 + mx1 - mx0] = ord1 - simuCube[1, y0:y0 + my1 - my0, x0:x0 + mx1 - mx0] = ord2 - - if (intx != 0) or (inty != 0): - mod = models[k, my0:my1, mx0:mx1] - simuCube[kPA + 2, y0:y0 + my1 - my0, - x0:x0 + mx1 - mx0] += mod * fluxscale - return simuCube + return trplot#gridplot(children=[[trplot, sumplot]]) if __name__ == '__main__': ra, dec = "04 25 29.0162", "-30 36 01.603" # Wasp 79 - # sossFieldSim(ra, dec) - if EXOCTK_DATA: - fieldSim(ra, dec, instrument='NIRISS') + field_simulation(ra, dec, 'NIS_SUBSTRIP256') diff --git a/exoctk/contam_visibility/make_contam_traces.py b/exoctk/contam_visibility/make_contam_traces.py new file mode 100644 index 00000000..4f1ca9c7 --- /dev/null +++ b/exoctk/contam_visibility/make_contam_traces.py @@ -0,0 +1,93 @@ +import numpy as np +import os +from astropy.io import fits +from bokeh.plotting import figure, show +from bokeh.io import output_notebook +from hotsoss.plotting import plot_frame + + +def generate_pandeia_traces(min_teff=2800, max_teff=6000, increment=100, norm_mag=10., outdir=None): + """ + Generate the precomputed traces 
for a range of Teff values + to be used by the contamination tool + + Parameters + ---------- + min_teff: int + The minimum Teff to calculate + max_teff: int + The maximum Teff to calculate + increment: int + The increment in Teff space to use + norm_mag: float + The magnitude to normalize to + outdir: str + The path for the generated files + + """ + from pandeia.engine.calc_utils import build_default_calc + from pandeia.engine.perform_calculation import perform_calculation + + modes = {'NIS_SUBSTRIP256': {'inst': 'niriss', 'mode': 'soss', 'subarray': 'substrip256'}, + 'NIS_SUBSTRIP96': {'inst': 'niriss', 'mode': 'soss', 'subarray': 'substrip96'}, + 'MIRIM_SLITLESSPRISM': {'inst': 'miri', 'mode': 'lrsslitless'}, + 'NRS_S1600A1_SLIT_PRISM_CLEAR': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'prism', 'filter': 'clear'}, + 'NRS_S1600A1_SLIT_G140M_F070LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g140m', 'filter': 'f070lp'}, + 'NRS_S1600A1_SLIT_G140M_F100LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g140m', 'filter': 'f100lp'}, + 'NRS_S1600A1_SLIT_G235M_F170LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g235m', 'filter': 'f170lp'}, + 'NRS_S1600A1_SLIT_G395M_F290LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g395m', 'filter': 'f290lp'}, + 'NRS_S1600A1_SLIT_G140H_F070LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g140h', 'filter': 'f070lp'}, + 'NRS_S1600A1_SLIT_G140H_F100LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g140h', 'filter': 'f100lp'}, + 'NRS_S1600A1_SLIT_G235H_F170LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g235h', 'filter': 'f170lp'}, + 'NRS_S1600A1_SLIT_G395H_F290LP': {'inst': 'nirspec', 'mode': 'bots', 'disperser': 'g395h', 'filter': 'f290lp'}, + 'NRCA5_GRISM256_F322W2': {'inst': 'nircam', 'mode': 'wfgrism', 'filter': 'f322w2'}, + 'NRCA5_GRISM256_F356W': {'inst': 'nircam', 'mode': 'wfgrism', 'filter': 'f356w'}, + 'NRCA5_GRISM256_F277W': {'inst': 'nircam', 'mode': 'wfgrism', 'filter': 'f277w'}, 
+ 'NRCA5_GRISM256_F444W': {'inst': 'nircam', 'mode': 'wfgrism', 'filter': 'f444w'}} + + for name, mode in modes.items(): + + # Configure the instrument + configuration = build_default_calc("jwst", mode['inst'], mode['mode']) + if 'filter' in mode: + configuration['configuration']['instrument']['filter'] = mode.get('filter') + if 'disperser' in mode: + configuration['configuration']['instrument']['disperser'] = mode.get('disperser') + if 'subarray' in mode: + configuration['configuration']['detector']['subarray'] = mode.get('subarray', configuration['configuration']['detector']['subarray']) + + # Set the scene + scene = {} + scene['position'] = {'x_offset': 0., 'y_offset': 0., 'orientation': 0., 'position_parameters': ['x_offset', 'y_offset', 'orientation']} + scene['shape'] = {'geometry': 'point'} + scene['spectrum'] = {'name': 'Phoenix Spectrum', 'spectrum_parameters': ['sed', 'normalization']} + scene['spectrum']['normalization'] = {'type': 'jwst', 'bandpass': 'niriss,imaging,f480m', 'norm_flux': norm_mag, 'norm_fluxunit': 'vegamag'} + + # Perform the calculation + for teff in np.arange(min_teff, max_teff + increment, increment): + + print("Generating {} {}...".format(name, teff)) + + # Set the temperature + scene['spectrum']['sed'] = {'sed_type': 'phoenix', 'teff': teff, 'log_g': 5.0, 'metallicity': 0.0} + + # Set the scene + configuration['scene'][0] = scene + + # Perform calculation + report = perform_calculation(configuration, webapp=False) + + # Set directory for output + if outdir is None: + outdir = os.path.join(os.environ['EXOCTK_DATA'], 'exoctk_contam/traces') + + fullpath = os.path.join(outdir, name) + if not os.path.exists(fullpath): + os.system('mkdir {}'.format(fullpath)) + + # Save the file + hdu0 = fits.PrimaryHDU() + hdu1 = fits.ImageHDU([report['2d']['detector']]) + hdulist = fits.HDUList([hdu0, hdu1]) + hdulist.writeto(os.path.join(fullpath, '{}_{}.fits'.format(name, int(teff))), overwrite=True) + print("Saved {}".format(fullpath)) \ No newline 
at end of file diff --git a/exoctk/contam_visibility/new_vis_plot.py b/exoctk/contam_visibility/new_vis_plot.py new file mode 100644 index 00000000..4cddc59b --- /dev/null +++ b/exoctk/contam_visibility/new_vis_plot.py @@ -0,0 +1,71 @@ +from astropy.time import Time + +from bokeh.models import Band, ColumnDataSource, HoverTool +from bokeh.plotting import figure, show + +from jwst_gtvt.jwst_tvt import Ephemeris +from jwst_gtvt.plotting import get_visibility_windows + + +def get_exoplanet_positions(ra, dec, in_FOR=None): + """Use the jwst_gtvt to obtain positions of exoplanet. + """ + + eph = Ephemeris() + exoplanet_data = eph.get_fixed_target_positions(ra, dec) + + if in_FOR is None: + return exoplanet_data + else: + return exoplanet_data.loc[exoplanet_data['in_FOR']==in_FOR] + + +def build_visibility_plot(target_name, instrument, ra, dec): + """Build bokeh figure for visibility windows + """ + + instrument = instrument.upper() + + if instrument not in ['NIRCAM', 'NIRISS', 'MIRI', 'NIRSPEC']: + raise ValueError(f'{instrument} not supported for this tool!') + + nominal_angle_column_name = instrument + '_nominal_angle' + min_pa_column_name = instrument + '_min_pa_angle' + max_pa_column_name = instrument + '_max_pa_angle' + + # obtain exoplanet data and filter visibility windows + exoplanet_df = get_exoplanet_positions(ra, dec, in_FOR=True) + window_indices = get_visibility_windows(exoplanet_df.index.tolist()) + + exoplanet_df['times'] = Time(exoplanet_df['MJD'], format='mjd').datetime + + # source = ColumnDataSource(exoplanet_df) + + # define bokeh figure + TOOLTIPS = [ + ("Date", "@times{%F}"), + ("Nominal Position Angle", "@{}".format(nominal_angle_column_name)), + ("Min Position Angle", "@{}".format(min_pa_column_name)), + ("Max Position Angle", "@{}".format(max_pa_column_name)),] + + p = figure(title=f"{target_name} Visibility with {instrument}", + height=400, width=800, x_axis_type='datetime') + + p.xaxis.axis_label = 'Date' + p.yaxis.axis_label = 'Available 
Aperture Position Angles (Degrees)' + + for start, end in window_indices: + data_to_plot = exoplanet_df.loc[start:end] + source = ColumnDataSource(data_to_plot) + + p.line("times", instrument + "_nominal_angle", line_dash=(10, 7), line_width=1, source=source, legend_label="Nominal Angle") + + band = Band(base='times', lower=instrument + '_min_pa_angle', upper=instrument + '_max_pa_angle', source=source, + level='underlay', fill_alpha=1.0, line_width=4, line_color='green') + + p.add_layout(band) + p.xaxis.major_label_orientation = 3.14/4 + + p.add_tools(HoverTool(tooltips=TOOLTIPS, formatters={'@times': 'datetime'})) + + return p \ No newline at end of file diff --git a/exoctk/contam_visibility/source_catalog.py b/exoctk/contam_visibility/source_catalog.py new file mode 100644 index 00000000..9cd4235f --- /dev/null +++ b/exoctk/contam_visibility/source_catalog.py @@ -0,0 +1,2239 @@ +#! /usr/bin/env python + +""" +Tools for generating Mirage-compatible catalogs from surveys +Note that once you have mirage-formatted catalogs, there is an "add_catalog" function +in catalog_generator.py that can combine catalogs +""" + +from collections import OrderedDict +import copy +import logging +import math +import numpy as np +import os +import pkg_resources +import re + +from astropy.coordinates import SkyCoord, Galactic +from astropy.io import ascii +from astropy.table import Table, join +import astropy.units as u +from astroquery.gaia import Gaia +from astroquery.ipac.irsa import Irsa +from pysiaf.utils.projection import deproject_from_tangent_plane + + +def source_catalog(ra, dec, box_width): + # Get 2MASS, WISE, and Gaia sources + gaia_cat, gaia_mag_cols, gaia_2mass, gaia_2mass_crossref, gaia_wise, gaia_wise_crossref = query_GAIA_ptsrc_catalog(ra, dec, box_width) + twomass_cat, twomass_cols = query_2MASS_ptsrc_catalog(outra, outdec, box_width) + wise_cat, wise_cols = query_WISE_ptsrc_catalog(outra, outdec, box_width, wise_catalog='allwise') + + + +# from 
mirage.apt.apt_inputs import get_filters, ra_dec_update +# from mirage.catalogs.catalog_generator import PointSourceCatalog, GalaxyCatalog, \ +# ExtendedCatalog, MovingPointSourceCatalog, MovingExtendedCatalog, \ +# MovingSersicCatalog +# from mirage.logging import logging_functions +# from mirage.utils.constants import FGS_FILTERS, NIRCAM_FILTERS, NIRCAM_PUPIL_WHEEL_FILTERS, \ +# NIRISS_FILTERS, NIRISS_PUPIL_WHEEL_FILTERS, NIRCAM_2_FILTER_CROSSES, NIRCAM_WL8_CROSSING_FILTERS, \ +# NIRCAM_CLEAR_CROSSING_FILTERS, NIRCAM_GO_PW_FILTER_PAIRINGS, LOG_CONFIG_FILENAME, STANDARD_LOGFILE_NAME +# from mirage.utils import siaf_interface +# from mirage.utils.utils import ensure_dir_exists, make_mag_column_names, standardize_filters + + +def query_2MASS_ptsrc_catalog(ra, dec, box_width): + """ + Query the 2MASS All-Sky Point Source Catalog in a square region around + the RA and Dec provided. Box width must be in units of arcseconds + + Parameters + ---------- + ra : float or str + Right ascention of the center of the catalog. Can be decimal degrees + or HMS string + + dec : float or str + Declination of the center of the catalog. Can be decimal degrees of + DMS string + + box_width : float + Width of the box in arcseconds containing the catalog. 
+ + Returns + ------- + query_table : astropy.table.Table + Catalog composed of MaskedColumns containing 2MASS sources + + magnitude_column_names : list + List of column header names corresponding to columns containing + source magnitude + """ + # Don't artificially limit how many sources are returned + Irsa.ROW_LIMIT = -1 + + ra_dec_string = "{} {}".format(ra, dec) + query_table = Irsa.query_region(ra_dec_string, catalog='fp_psc', spatial='Box', + width=box_width * u.arcsec) + + # Exclude any entries with missing RA or Dec values + radec_mask = filter_bad_ra_dec(query_table) + query_table = query_table[radec_mask] + + # Column names of interest + magnitude_column_names = ['j_m', 'h_m', 'k_m'] + return query_table, magnitude_column_names + + +# def get_wise_ptsrc_catalog(ra, dec, box_width, wise_catalog='allwise'): +# """Wrapper around WISE query and creation of mirage-formatted catalog +# +# Parameters +# ---------- +# ra : float or str +# Right ascention of the center of the catalog. Can be decimal degrees +# or HMS string +# +# dec : float or str +# Declination of the center of the catalog. Can be decimal degrees of +# DMS string +# +# wise_catalog : str +# Switch that specifies the WISE catalog to be searched. Possible values are +# 'ALLWISE' (default), which searches the ALLWISE source catalog, or +# 'WISE_all_sky', which searches the older WISE All-Sky catalog. +# """ +# wise_cat, wise_mag_cols = query_WISE_ptsrc_catalog(ra, dec, box_width, wise_catalog=wise_catalog) +# wise_mirage = mirage_ptsrc_catalog_from_table(wise_cat, 'WISE', wise_mag_cols) +# return wise_mirage, wise_cat + + +def query_WISE_ptsrc_catalog(ra, dec, box_width, wise_catalog='ALLWISE'): + """Query the WISE All-Sky Point Source Catalog in a square region around the RA and Dec + provided. Box width must be in units of arcseconds + + Parameters + ---------- + ra : float or str + Right ascention of the center of the catalog. 
Can be decimal degrees + or HMS string + + dec : float or str + Declination of the center of the catalog. Can be decimal degrees of + DMS string + + box_width : float + Width of the box in arcseconds containing the catalog. + + wise_catalog : str + Switch that specifies the WISE catalog to be searched. Possible values are + 'ALLWISE' (default), which searches the ALLWISE source catalog, or + 'WISE_all_sky', which searches the older WISE All-Sky catalog. + + Returns + ------- + query_table : astropy.table.Table + Catalog composed of MaskedColumns containing WISE sources + + magnitude_column_names : list + List of column header names corresponding to columns containing source magnitude + """ + # List of columns to be retrieved from the WISE catalog. For the case of the ALLWISE + # catalog, the 2mass columns we need are not returned by default, so we need to explicitly + # list all of the columns we want. + cols = 'ra,dec,w1mpro,w2mpro,w3mpro,w4mpro,w1sigmpro,w2sigmpro,w3sigmpro,w4sigmpro,j_m_2mass,h_m_2mass,k_m_2mass' + + # Determine which WISE catalog will be searched + if wise_catalog.lower() == 'allwise': + search_cat = 'allwise_p3as_psd' + elif wise_catalog.lower() == 'wise_all_sky': + search_cat = 'allsky_4band_p3as_psd' + else: + raise ValueError(('{}: Unrecognized WISE catalog version to be searched. 
wise_catalog should ' + 'be "allwise" or "wise_all_sky".'.format(wise_catalog))) + + # Don't artificially limit how many sources are returned + Irsa.ROW_LIMIT = -1 + + ra_dec_string = "{} {}".format(ra, dec) + query_table = Irsa.query_region(ra_dec_string, catalog=search_cat, spatial='Box', + width=box_width * u.arcsec, selcols=cols) + + # Exclude any entries with missing RA or Dec values + radec_mask = filter_bad_ra_dec(query_table) + query_table = query_table[radec_mask] + + # Column names of interest + magnitude_column_names = ['w1mpro', 'w2mpro', 'w3mpro', 'w4mpro'] + return query_table, magnitude_column_names + + +# def mirage_ptsrc_catalog_from_table(table, instrument, mag_colnames, magnitude_system='vegamag'): +# """Create a mirage-formatted point source catalog from an input astropy +# table (e.g. from one of the query functions), along with the magnitude +# column names of interest +# +# Parameters +# ---------- +# table : astropy.table.Table +# Source catalog from (e.g.) Gaia or 2MASS query +# +# instrument : str +# Unique identifier for where data came from (e.g. '2MASS', 'WISE', 'nircam_f200w') +# +# mag_colnames : list +# List of strings corresponding to the columns in 'table' that contain magnitude values +# +# magnitude_system : str +# This is the label for the magnitude system, 'vegamag', 'abmag', or 'stmag'. +# """ +# cat = PointSourceCatalog(ra=table['ra'].data.data, dec=table['dec'].data.data) +# +# for magcol in mag_colnames: +# data = table[magcol].filled().data +# cat.add_magnitude_column(data, instrument=instrument, filter_name=magcol, +# magnitude_system=magnitude_system) +# return cat +# +# +# def get_2MASS_ptsrc_catalog(ra, dec, box_width): +# """Wrapper around 2MASS query and creation of mirage-formatted catalog +# +# Parameters +# ---------- +# ra : float or str +# Right ascention of the center of the catalog. Can be decimal degrees or HMS string +# +# dec : float or str +# Declination of the center of the catalog. 
Can be decimal degrees of DMS string +# +# box_width : float +# Width of the box in arcseconds containing the catalog. +# +# Returns +# ------- +# twomass : tup +# 2-element tuple. The first is a PointSourceCatalog object containing +# the 2MASS source information. The second is an astropy.table.Table +# object containing the exact query results from 2MASS. Note that these +# outputs will contain only JHK magnitudes for the returned sources. +# """ +# twomass_cat, twomass_mag_cols = query_2MASS_ptsrc_catalog(ra, dec, box_width) +# twomass_mirage = mirage_ptsrc_catalog_from_table(twomass_cat, '2MASS', twomass_mag_cols) +# return twomass_mirage, twomass_cat +# +# +# def twoMASS_plus_background(ra, dec, box_width, kmag_limits=(17, 29), email='', seed=None): +# """Convenience function to create a catalog from 2MASS and add a population of +# fainter stars. In this case, cut down the magnitude limits for the call to the +# Besancon model so that we don't end up with a double population of bright stars +# +# Parameters +# ---------- +# ra : float or str +# Right ascention of the center of the catalog. Can be decimal degrees +# or HMS string +# +# dec : float or str +# Declination of the center of the catalog. Can be decimal degrees of +# DMS string +# +# box_width : float +# Width of the box in arcseconds containing the catalog. +# +# kmag_limits : tup +# Tuple of minimum and maximum K magnitude values to use in Besancon +# model query for background stars +# +# email : str +# A valid email address is necessary to perform a Besancon query +# +# seed : int +# Seed to use in the random number generator when choosing RA and +# Dec values for Besancon sources. 
+# """ +# two_mass, twomass_cat = get_2MASS_ptsrc_catalog(ra, dec, box_width) +# background, background_cat = besancon(ra, dec, box_width, coords='ra_dec', email=email, seed=seed) +# two_mass.add_catalog(background) +# return two_mass +# +# +# def get_all_catalogs(ra, dec, box_width, besancon_catalog_file=None, instrument='NIRISS', filters=[], +# ra_column_name='RAJ2000', dec_column_name='DECJ2000', starting_index=1, wise_catalog='allwise'): +# """ +# This is a driver function to query the GAIA/2MASS/WISE catalogues +# plus the Besancon model and combine these into a single JWST source list. +# The routine cuts off the bright end of the Besancon model to match the input +# 2MASS catalogue, so as to avoid getting too many bright stars in the +# output catalogue. +# For the observed values, the code interpolates the photon-weighted mean +# flux density values in wavelength to get the NIRCam/NIRISS/Guider magnitudes. +# This process is nominally OK when there is extinction, but only when the +# spectrum is relatively smooth (so it is suspect for stars with very deep +# moelcular bands over the near-infrared wavelengths). +# For the Bescancon values, the VJHKL magnitudes are matched to Kurucz models +# and then the extinction is applied. This process is limited by the range +# of Kurucz models considered., and to some degree by the simple extinction +# model used in the Besancon code. +# +# Parameters +# ---------- +# ra : float or str +# Right ascension of the target field in degrees or HMS +# +# dec : float or str +# Declination of the target field in degrees or DMS +# +# box_width : float +# Size of the (square) target field in arc-seconds +# +# besancon_catalog_file : str +# Name of ascii catalog containing background stars. 
The code was +# developed around this being a catalog output by the Besaoncon +# model (via ``besancon()``), but it can be any ascii catalog +# of sources as long as it contains the columns specified in +# the ``catalog`` parameter of ``johnson_catalog_to_mirage_catalog()`` +# If None, the Besancon step will be skipped and a catalog will be +# built using only GAIA/2MASS/WISE +# +# instrument : str +# One of "all", "NIRISS", "NIRCam", or "Guider" +# +# filters : list +# Either an empty list (which gives all filters) or a list +# of filter names (i.e. F090W) to be calculated. +# +# ra_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# right ascension of the sources. +# +# dec_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# declination of the sources +# +# starting_index : int +# Beginning value to use for the 'index' column of the catalog. Default is 1. +# +# wise_catalog : str +# Switch that specifies the WISE catalog to be searched. Possible values are +# 'allwise' (default), which searches the ALLWISE source catalog, or +# 'wise_all_sky', which searches the older WISE All-Sky catalog. +# +# Returns +# ------- +# source_list : mirage.catalogs.create_catalogs.PointSourceCatalog +# A table with the filter magnitudes +# +# filter_names : list +# A list of the filter name header strings for writing to +# an output file. +# """ +# logger = logging.getLogger('mirage.catalogs.create_catalog.get_all_catalogs') +# +# if isinstance(ra, str): +# pos = SkyCoord(ra, dec, frame='icrs') +# outra = pos.ra.deg +# outdec = pos.dec.deg +# else: +# outra = ra +# outdec = dec +# +# # Normalize filter names. Transform any shorthand into a full filter +# # specification. (e.g. 
"F090W" -> "W090W/CLEAR") +# filters = standardize_filters(instrument, filters) +# filter_names = make_mag_column_names(instrument, filters) +# +# gaia_cat, gaia_mag_cols, gaia_2mass, gaia_2mass_crossref, gaia_wise, \ +# gaia_wise_crossref = query_GAIA_ptsrc_catalog(outra, outdec, box_width) +# twomass_cat, twomass_cols = query_2MASS_ptsrc_catalog(outra, outdec, box_width) +# wise_cat, wise_cols = query_WISE_ptsrc_catalog(outra, outdec, box_width, wise_catalog=wise_catalog) +# +# if besancon_catalog_file is not None: +# filter_dict = {instrument: filters} +# besancon_jwst = johnson_catalog_to_mirage_catalog(besancon_catalog_file, filter_dict, ra_column_name=ra_column_name, +# dec_column_name=dec_column_name, magnitude_system='vegamag') +# +# # Combine data from GAIA/2MASS/WISE to create single catalog with JWST filters +# observed_jwst = combine_and_interpolate(gaia_cat, gaia_2mass, gaia_2mass_crossref, gaia_wise, +# gaia_wise_crossref, twomass_cat, wise_cat, instrument, filters, +# starting_index=starting_index) +# +# if besancon_catalog_file is not None: +# logger.info('Adding %d sources from Besancon to %d sources from the catalogues.' % (len(besancon_jwst.ra), +# len(observed_jwst.ra))) +# source_list = combine_catalogs(observed_jwst, besancon_jwst, starting_index=starting_index) +# else: +# source_list = observed_jwst +# return source_list, filter_names +# +# +# def transform_johnson_to_jwst(johnson_cat, filter_names): +# """ +# Given the output from a Besancon model, transform to JWST magnitudes by +# making a best match to the standard BOSZ model colours for the VJHKL +# magnitudes, then taking the BOSZ results for the JWST filters. Apply +# any ISM extinction after the matching. +# One possible improvement would be to interpolate between the best matches +# rather than just taking the best one. +# +# Parameters +# ---------- +# johnson_cat : astropy.table.Table +# Table containing sources with magnitudes in Johnson filters. 
+# V, J, H, and K are required, along with Av (ISM extinction) +# +# filter_names : list +# The list of the requried output NIRCam/NIRISS/Guider filter names. +# +# Returns +# ------- +# johnson_cat : astropy.table.Table +# Modified table with new columns for JWST filter-based magnitudes +# """ +# standard_magnitudes, standard_values, standard_filters, standard_labels = read_standard_magnitudes() +# nstars = len(johnson_cat['V'].data) +# nfilters = len(filter_names) +# +# # out_magnitudes is a two-dimensional float numpy array with the star +# # number in the first dimension and the estimated JWST magnitudes +# # in the second dimension. A value of None is returned if the matching +# # fails. This should not happen with the regular inputs. +# out_magnitudes = np.zeros((nstars, nfilters), dtype=np.float32) +# inds = crossmatch_filter_names(filter_names, standard_filters) +# in_magnitudes = np.zeros((4), dtype=np.float32) +# +# in_filters = ['Johnson V', 'Johnson J', 'Johnson H', 'Johnson K'] +# for loop in range(nstars): +# # Exclude any bad values returned by the Besancon query +# if johnson_cat['V'].data[loop] < 90: +# in_magnitudes[0] = johnson_cat['V'].data[loop] +# in_magnitudes[1] = johnson_cat['J'].data[loop] +# in_magnitudes[2] = johnson_cat['H'].data[loop] +# in_magnitudes[3] = johnson_cat['K'].data[loop] +# newmags = match_model_magnitudes(in_magnitudes, in_filters, standard_magnitudes, standard_values, +# standard_filters, standard_labels) +# if newmags is None: +# return None +# newmags = johnson_cat['Av'].data[loop] * standard_values[3, :] + newmags +# out_magnitudes[loop, :] = newmags[inds] +# else: +# out_magnitudes[loop, :] = np.repeat(99., len(inds)) +# +# # Create new columns for the JWST filter-based magnitudes +# for magnitudes, filter_name in zip(out_magnitudes.transpose(), filter_names): +# johnson_cat[filter_name] = magnitudes +# +# return johnson_cat +# +# +# def catalog_colors_to_vjhk(color_catalog): +# """Given a Table containing a catalog 
of sources with V magnitudes as +# well as colors, generate J, H, and K magnitudes for all sources. +# +# Parameters +# ---------- +# +# color_catalog : astropy.table.Table +# Table of sources. Should have columns named: 'K', 'V-K', 'J-H', +# and 'J-K' +# +# Returns +# ------- +# +# color_catalog : astropy.table.Table +# Modified Table with 'J', 'H', 'K' columns added +# """ +# # Calculate magnitudes +# kmags = color_catalog['K'].data +# vmags = (color_catalog['K'] + color_catalog['V-K']).data +# jmags = kmags + color_catalog['J-K'].data +# hmags = jmags - color_catalog['J-H'].data +# +# # Add to table +# color_catalog['J'] = jmags +# color_catalog['H'] = hmags +# color_catalog['V'] = kmags +# return color_catalog +# +# +# def johnson_catalog_to_mirage_catalog(catalog_file, filters, ra_column_name='RAJ2000', dec_column_name='DECJ2000', +# magnitude_system='abmag', output_file=None, starting_index=1): +# """Create a Mirage-formatted catalog containing sources from a query of the Besancon model +# +# Parameters +# ---------- +# catalog_file : str +# Name of ascii file containing the input catalog. The catalog must have +# RA and Dec columns (whose names can be specified using ``ra_column_name`` +# and ``dec_column_name``), as well as a ``K`` column containing source +# magnitudes in the K band, and an ``Av`` column containing ISM extinction. +# It must also have either: 1) ``J``, ``H``, and ``V`` columns with +# appropriate magnitudes, or 2) ``V-K``, ``J-K``, and ``J-H`` columns. +# +# filters : dict +# Dictionary with keys equal to JWST instrument names, and values +# that are lists of filter names. 
Besancon sources will have magnitudes +# transformed into these filters +# +# ra_column_name : str +# Name of the column in the input catalog that contains the Right Ascension +# values for the sources +# +# dec_column_name : str +# Name of the column in the input catalog that contains the Declination +# values for the sources +# +# magnitude_system : str +# Magnitude system of the values in the catalog. Options are 'abmag', 'stmag', +# and 'vegamag' +# +# output_file : str +# If not None, save the catalog to a file with this name +# +# starting_index : int +# Beginning value to use for the 'index' column of the catalog. Default is 1. +# +# Returns +# ------- +# transformed_besancon_cat : mirage.catalogs.create_catalog.PointSourceCatalog +# Catalog containing Besancon model stars with magnitudes in requested +# JWST filters +# """ +# # Check the input magnitude system +# if magnitude_system not in ['abmag', 'stmag', 'vegamag']: +# raise ValueError(("ERROR: magnitude_system for {} must be one of: 'abmag', 'stmag', 'vegamag'.") +# .format(catalog_file)) +# +# # Create list of filter names from filters dictionary +# all_filters = [] +# for instrument in filters: +# filt_list = make_mag_column_names(instrument.lower(), filters[instrument]) +# all_filters.extend(filt_list) +# +# # Read in the input catalog +# catalog = ascii.read(catalog_file) +# +# # Quick check to be sure required columns are present +# req_cols = ['K', 'Av', ra_column_name, dec_column_name] +# for colname in req_cols: +# if colname not in catalog.colnames: +# raise ValueError(('ERROR: Required column {} is missing from {}.'.format(colname, catalog_file))) +# +# # Check which columns are present to see if colors need to be translated +# # into magnitudes +# if 'J' in catalog.colnames and 'H' in catalog.colnames and 'V' in catalog.colnames: +# calculate_magnitudes_from_colors = False +# else: +# if 'V-K' in catalog.colnames and 'J-K' in catalog.colnames and 'J-H' in catalog.colnames: +# 
calculate_magnitudes_from_colors = True +# else: +# raise ValueError(("ERROR: {} must contain either 'J', 'H', and 'K' columns, or 'V-K', 'J-K' and " +# "'J-H' columns").format(catalog_file)) +# +# # If the input catalog contains only V magnitudes and colors, calculate +# # the magnitudes in the other Johnson filters +# if calculate_magnitudes_from_colors: +# catalog = catalog_colors_to_vjhk(catalog) +# +# # Translate Johnson-based filter magnitudes into JWST filter-based +# # magnitudes for the requested filters. +# catalog = transform_johnson_to_jwst(catalog, all_filters) +# +# # Extract the relevant columns and create a Mirage-formatted point +# # source catalog +# mirage_cat = PointSourceCatalog(ra=catalog[ra_column_name].data, +# dec=catalog[dec_column_name].data, +# starting_index=starting_index) +# +# for filt in all_filters: +# #instrument, filter_name, _ = filt.split('_') +# mirage_cat.add_magnitude_column(catalog[filt], column_name=filt) +# +# # Save to output file if requested +# if output_file is not None: +# mirage_cat.save(output_file) +# +# return mirage_cat +# +# +# def crossmatch_filter_names(filter_names, standard_filters): +# """Return a list of filter names matching those of standard filters +# +# Parameters +# ---------- +# filter_names : list +# List of input filter names +# +# standard_filters : list +# List of recognized filter names +# +# Returns +# ------- +# inds : list +# List of matching filter names +# """ +# inds = [] +# for filter_name in filter_names: +# name_to_match = filter_name +# +# # Weak lens inputs can be WLP8 or WLM8, but we want to match either +# # with the WLP8 entries in the standard magnitude list +# if 'wlm' in filter_name: +# name_to_match = filter_name.replace('wlm', 'wlp') +# +# index = standard_filters.index(name_to_match) +# inds.append(index) +# return inds +# +# +# def match_model_magnitudes(in_magnitudes, in_filters, standard_magnitudes, +# standard_values, standard_filters, standard_labels): +# """ +# This 
code attempts to make the best match between a set of input magnitudes +# and a set of BOSZ simulated magnitudes. It is assumed that the input +# magnitudes are not reddened. The match is for the smallest root-mean-square +# deviation between the input magnitudes and the BOSZ model magnitudes, with +# an overall offset factor applied to the latter. +# +# Parameters +# ---------- +# in_magnitudes : numpy.ndarray +# The list of (A0V) magnitude values to match. +# +# in_filters : list +# The labels for the magnitudes. +# +# standard_magntudes : numpy,ndarray +# 2D array of standard simulated magnitude values. +# +# standard_values : numpy.ndarray +# 2D array of other filter values (wavelengths, +# zero magnitude flux density values, extinction) +# +# standard_filters : list +# The list of the standard magnitude filter names +# +# standard_labels : list +# The labels for the input stellar atmosphere models +# used to calculate the standard magnitudes. +# +# Returns +# ------- +# out_magnitudes : numpy.ndarray or None +# 1D array of the full set of estimated magnitudes from the model +# matching, or None if a problem occurs. 
+# """ +# logger = logging.getLogger('mirage.catalogs.create_catalog.match_model_magnitudes') +# inds = crossmatch_filter_names(in_filters, standard_filters) +# nmatch = float(len(inds)) +# if nmatch != len(in_filters): +# logger.warning('Error in matching the requested filters for model matching.') +# return None +# +# subset = np.copy(standard_magnitudes[:, inds]) +# del1 = subset - in_magnitudes +# offset = np.mean(del1, axis=1) +# offset_exp = np.expand_dims(offset, axis=1) +# offset_stack = np.repeat(offset_exp, len(in_magnitudes), axis=1) +# delm = (subset - offset_stack - in_magnitudes) +# rms = np.sqrt(np.sum(delm * delm, axis=1) / nmatch) +# smallest = np.where(rms == np.min(rms))[0][0] +# out_magnitudes = standard_magnitudes[smallest, :] - offset[smallest] +# +# return out_magnitudes +# +# +# def read_standard_magnitudes(): +# """ +# The code reads a file magslist_bosz_normal_mirage1.new to get the simulated +# magnitudes. This file gives simulated magnitudes for the Johnson VJHKL +# filters, the 2MASS filters, the WISE filters, the GAIA filters, and the +# JWST filters. Also some filter specific values are read from the header +# lines of the file. +# +# Returns +# ------- +# standard_magntudes : numpy.ndarray +# 2D array of standard simulated magnitude values. +# +# standard_values : numpy.ndarray +# 2D array of other filter values (wavelengths, +# zero magnitude flux density values, extinction) +# +# standard_filters : list +# The list of the standard magnitude filter names +# +# standard_labels : list +# The labels for the input stellar atmosphere models +# used to calculate the standard magnitudes. 
+# """ +# # read in the values needed to transform the Besancon model magnitudes +# # +# module_path = pkg_resources.resource_filename('mirage', '') +# standard_mag_file = os.path.join(module_path, 'config/magslist_bosz_normal_mirage.new') +# with open(standard_mag_file, 'r') as infile: +# lines = infile.readlines() +# +# standard_magnitudes = np.loadtxt(standard_mag_file, comments='#') +# # standard_values holds the wavelengths (microns), zero magnitude flux +# # density values (W/m^2/micron and Jy) and the relative ISM extinction +# standard_values = np.zeros((4, 73), dtype=np.float32) +# # The following list is manually produced, but must match the order +# # of filters in the input file, where the names are a bit different. +# # Note that for the GAIA g filter the trailing space is needed to +# # allow the code to differentiate the G, BP, and RP filters. +# standard_filters = ['Johnson V', 'Johnson J', 'Johnson H', 'Johnson K', +# '2MASS J', '2MASS H', '2MASS Ks', 'Johnson L', +# 'WISE W1', 'WISE W2', 'WISE W3', 'WISE W4', 'GAIA g ', +# 'GAIA gbp', 'GAIA grp', +# 'niriss_f090w_magnitude', 'niriss_f115w_magnitude', +# 'niriss_f140m_magnitude', 'niriss_f150w_magnitude', +# 'niriss_f158m_magnitude', 'niriss_f200w_magnitude', +# 'niriss_f277w_magnitude', 'niriss_f356w_magnitude', +# 'niriss_f380m_magnitude', 'niriss_f430m_magnitude', +# 'niriss_f444w_magnitude', 'niriss_f480m_magnitude', +# 'fgs_guider1_magnitude', 'fgs_guider2_magnitude', +# 'nircam_f070w_clear_magnitude', 'nircam_f090w_clear_magnitude', +# 'nircam_f115w_clear_magnitude', 'nircam_f140m_clear_magnitude', +# 'nircam_f150w_clear_magnitude', 'nircam_f150w2_clear_magnitude', +# 'nircam_f150w2_f162m_magnitude', 'nircam_f150w2_f164n_magnitude', +# 'nircam_f182m_clear_magnitude', 'nircam_f187n_clear_magnitude', +# 'nircam_f200w_clear_magnitude', 'nircam_f210m_clear_magnitude', +# 'nircam_f212n_clear_magnitude', 'nircam_f250m_clear_magnitude', +# 'nircam_f277w_clear_magnitude', 
'nircam_f300m_clear_magnitude', +# 'nircam_f322w2_clear_magnitude', 'nircam_f322w2_f323n_magnitude', +# 'nircam_f335m_clear_magnitude', 'nircam_f356w_clear_magnitude', +# 'nircam_f360m_clear_magnitude', 'nircam_f444w_f405n_magnitude', +# 'nircam_f410m_clear_magnitude', 'nircam_f430m_clear_magnitude', +# 'nircam_f444w_clear_magnitude', 'nircam_f460m_clear_magnitude', +# 'nircam_f444w_f466n_magnitude', 'nircam_f444w_f470n_magnitude', +# 'nircam_f480m_clear_magnitude', 'nircam_wlp4_clear_magnitude', +# 'nircam_f070w_wlp8_magnitude', 'nircam_f090w_wlp8_magnitude', +# 'nircam_f115w_wlp8_magnitude', 'nircam_f140m_wlp8_magnitude', +# 'nircam_f150w2_wlp8_magnitude', 'nircam_f150w_wlp8_magnitude', +# 'nircam_f162m_wlp8_magnitude', 'nircam_f164n_wlp8_magnitude', +# 'nircam_f182m_wlp8_magnitude', 'nircam_f187n_wlp8_magnitude', +# 'nircam_f200w_wlp8_magnitude', 'nircam_f210m_wlp8_magnitude', +# 'nircam_f212n_wlp8_magnitude', 'nircam_wlp4_wlp8_magnitude'] +# +# standard_labels = [] +# n1 = 0 +# for line in lines: +# line = line.strip('\n') +# if '#' in line[0:1]: +# values = line.split('#') +# if len(values) == 3: +# v1 = values[-1].split() +# for loop in range(4): +# standard_values[loop, n1] = float(v1[loop]) +# n1 = n1 + 1 +# else: +# values = line.split('#') +# standard_labels.append(values[-1]) +# return standard_magnitudes, standard_values, standard_filters, standard_labels + + +def combine_and_interpolate(gaia_cat, gaia_2mass, gaia_2mass_crossref, gaia_wise, + gaia_wise_crossref, twomass_cat, wise_cat, instrument, filter_names, + starting_index): + """ + This function combines GAIA/2MASS/WISE photometry to estimate JWST filter + magnitudes. The algorithm depends a bit on what magnitudes are available. + + Note it is implicitly assumed that the GAIA/2MASS/WISE data are for the + same area of sky. + The code gives different results depending on what magnitudes are available. + If only the GAIA g magnitude is available, this is used for all filters. 
+ If only the GAIA g/BP/RP magnitudes are available, the BP - RP magnitude + value is used to predict the infrared magnitudes based on standard stellar + atmosphere simulations. + If any 2MASS or WISE magnitudes (excluding upper limits) are available the + JWST magnitudes are found by wavelength interpolation using the pivot + wavelengths of the different filters including those for GAIA/2MASS/WISE. + + Parameters + ---------- + gaia_cat : astropy.table.Table + contains GAIA DR2 search results in table form + + gaia_2mass : astropy.table.Table + contains 2MASS catalogue values from the GAIA DR2 archive + + gaia_2mass_crossref : astropy.table.Table + contains GAIA/2MASS cross-references from the GAIA DR2 archive + + gaia_wise : astropy.table.Table + contains WISE catalogue values from the GAIA DR2 archive + + gaia_wise_crossref : astropy.table.Table + contains GAIA/WISE cross-references from the GAIA DR2 archive + + twomass_cat : astropy.table.Table + contains 2MASS data from IPAC in table form + + wise_cat : astropy.table.Table + contains WISE data from IPAC in table form + + instrument : str + Name of the instrument for which filter magnitudes are + needed: "NIRcam", "NIRISS", "Guider", or "All" + + filter_names : list or None + List of names of the filters to select for the instrument. + If the list is empty, or the value is None, all filters are + selected. + + starting_index : int + Beginning value to use for the 'index' column of the catalog. Default is 1. + + Returns + ------- + outcat : mirage.catalogs.create_catalog.PointSourceCatalog + This is the catalog of positions/magnitudes. 
+ """ + standard_magnitudes, standard_values, standard_filters, standard_labels = read_standard_magnitudes() + nfilters = len(filter_names) + ngaia = len(gaia_cat['ra']) + n2mass1 = len(gaia_2mass['ra']) + nwise1 = len(gaia_wise_crossref['ra']) + n2mass2 = len(twomass_cat['ra']) + nwise2 = len(wise_cat['ra']) + nout = ngaia + n2mass2 + nwise2 + in_magnitudes = np.zeros((nout, 10), dtype=np.float32) + 10000.0 + raout = np.zeros((nout), dtype=np.float32) + decout = np.zeros((nout), dtype=np.float32) + # magnitudes Gaia bp, g, rp; 2MASS J, H, Ks; WISE W1, W2, W3, W4 + in_filters = ['GAIA gbp', 'GAIA g ', 'GAIA grp', '2MASS J', '2MASS H', + '2MASS Ks', 'WISE W1', 'WISE W2', 'WISE W3', 'WISE W4'] + inds = crossmatch_filter_names(in_filters, standard_filters) + # if len(inds) != len(in_filters): + # logger.warning('Error matching the filters to the standard set.') + # return None + # first populate the gaia sources, with cross-references + in_magnitudes[0:ngaia, 1] = gaia_cat['phot_g_mean_mag'] + raout[0:ngaia] = gaia_cat['ra'] + decout[0:ngaia] = gaia_cat['dec'] + ngaia2masscr, ngaia2mass = twomass_crossmatch(gaia_cat, gaia_2mass, gaia_2mass_crossref, twomass_cat) + + twomassflag = [True] * n2mass2 + for n1 in range(n2mass2): + for loop in range(len(gaia_2mass['ra'])): + if gaia_2mass['DESIGNATION'][loop] == twomass_cat['designation'][n1]: + if ngaia2masscr[n1] >= 0: + twomassflag[n1] = False + matchwise, gaiawiseinds, twomasswiseinds = wise_crossmatch(gaia_cat, gaia_wise, gaia_wise_crossref, wise_cat, twomass_cat) + + wisekeys = ['w1sigmpro', 'w2sigmpro', 'w3sigmpro', 'w4sigmpro'] + + # Set invalid values to NaN + try: + gaia_bp_mags = gaia_cat['phot_bp_mean_mag'].filled(np.nan) + gaia_rp_mags = gaia_cat['phot_rp_mean_mag'].filled(np.nan) + except: + gaia_bp_mags = gaia_cat['phot_bp_mean_mag'] + gaia_rp_mags = gaia_cat['phot_rp_mean_mag'] + + for loop in range(ngaia): + try: + in_magnitudes[loop, 0] = gaia_bp_mags[loop] + in_magnitudes[loop, 2] = gaia_rp_mags[loop] + 
except: + pass + + # Find the index in gaia_2mass corresponding to this + # gaia_cat entry + g2m = np.where(ngaia2mass == loop)[0] + if len(g2m) > 0: + g2m = g2m[0] + else: + g2m = None + + # see if there is a 2MASS match + for n1 in range(n2mass2): + if loop == ngaia2masscr[n1]: + in_magnitudes[loop, 3] = twomass_cat['j_m'][n1] + in_magnitudes[loop, 4] = twomass_cat['h_m'][n1] + in_magnitudes[loop, 5] = twomass_cat['k_m'][n1] + + if g2m is not None: + for l1 in range(3): + if gaia_2mass['ph_qual'][g2m][l1] == 'U': + in_magnitudes[loop, 3+l1] = 10000. + + # see if there is a WISE match + for n2 in range(len(matchwise)): + if matchwise[n2] and (gaiawiseinds[n2] == loop): + in_magnitudes[loop, 6] = wise_cat['w1mpro'][n2] + in_magnitudes[loop, 7] = wise_cat['w2mpro'][n2] + in_magnitudes[loop, 8] = wise_cat['w3mpro'][n2] + in_magnitudes[loop, 9] = wise_cat['w4mpro'][n2] + + for l1 in range(4): + if not isinstance(wise_cat[wisekeys[l1]][n2], float): + in_magnitudes[loop, 6+l1] = 10000. + + # Add in any 2MASS sources with no GAIA match + n1 = 0 + noff = ngaia + for loop in range(n2mass2): + if twomassflag[loop]: + raout[noff+n1] = twomass_cat['ra'][loop] + decout[noff+n1] = twomass_cat['dec'][loop] + in_magnitudes[noff+n1, 3] = twomass_cat['j_m'][loop] + in_magnitudes[noff+n1, 4] = twomass_cat['h_m'][loop] + in_magnitudes[noff+n1, 5] = twomass_cat['k_m'][loop] + + for l1 in range(3): + if twomass_cat['ph_qual'][loop][l1] == 'U': + in_magnitudes[noff+n1, 3+l1] = 10000. + # Check to see if there is a WISE cross-match + for l1 in range(len(twomasswiseinds)): + if twomasswiseinds[l1] == loop: + in_magnitudes[noff+n1, 6] = wise_cat['w1mpro'][l1] + in_magnitudes[noff+n1, 7] = wise_cat['w2mpro'][l1] + in_magnitudes[noff+n1, 8] = wise_cat['w3mpro'][l1] + in_magnitudes[noff+n1, 9] = wise_cat['w4mpro'][l1] + + for l2 in range(4): + if not isinstance(wise_cat[wisekeys[l2]][l1], float): + in_magnitudes[noff+n1, 6+l2] = 10000. 
+            n1 = n1 + 1
+    # Finally, add in WISE sources that have not been cross-matched to GAIA
+    # or 2MASS.
+    noff = ngaia+n1
+    n1 = 0
+    for loop in range(nwise2):
+        if (not matchwise[loop]) and (twomasswiseinds[loop] < 0):
+            raout[noff+n1] = wise_cat['ra'][loop]
+            decout[noff+n1] = wise_cat['dec'][loop]
+            in_magnitudes[noff+n1, 6] = wise_cat['w1mpro'][loop]
+            in_magnitudes[noff+n1, 7] = wise_cat['w2mpro'][loop]
+            in_magnitudes[noff+n1, 8] = wise_cat['w3mpro'][loop]
+            in_magnitudes[noff+n1, 9] = wise_cat['w4mpro'][loop]
+
+            for l1 in range(4):
+                if not isinstance(wise_cat[wisekeys[l1]][loop], float):
+                    in_magnitudes[noff+n1, 6+l1] = 10000.
+            n1 = n1+1
+    # Now, convert to JWST magnitudes either by transformation (for sources
+    # with GAIA G/BP/RP magnitudes) or by interpolation (all other
+    # cases).
+    out_filter_names = make_mag_column_names(instrument, filter_names)
+    in_wavelengths = np.squeeze(np.copy(standard_values[0, inds]))
+    inds = crossmatch_filter_names(out_filter_names, standard_filters)
+    if len(inds) < 1:
+        return None
+    out_wavelengths = np.squeeze(np.copy(standard_values[0, inds]))
+    if len(inds) == 1:
+        out_wavelengths = np.zeros((1), dtype=np.float32)+out_wavelengths
+    nfinal = noff + n1
+
+    out_magnitudes = np.zeros((nout, len(out_filter_names)),
+                              dtype=np.float32)
+    for loop in range(nfinal):
+        values = interpolate_magnitudes(in_wavelengths, in_magnitudes[loop, :],
+                                        out_wavelengths, out_filter_names)
+        out_magnitudes[loop, :] = np.copy(values)
+    raout = np.copy(raout[0:nfinal])
+    decout = np.copy(decout[0:nfinal])
+    out_magnitudes = np.copy(out_magnitudes[0:nfinal, :])
+    outcat = PointSourceCatalog(ra=raout, dec=decout, starting_index=starting_index)
+    n1 = 0
+    for column_value in out_filter_names:
+        outcat.add_magnitude_column(np.squeeze(out_magnitudes[:, n1]), column_name=column_value,
+                                    magnitude_system='vegamag')
+        n1 = n1+1
+
+    return outcat
+
+
+# def twomass_crossmatch(gaia_cat, gaia_2mass, gaia_2mass_crossref, twomass_cat):
+#     """
+#     Take the 
GAIA to 2MASS cross references and make sure that there is only +# one GAIA source cross-matched to a given 2MASS source in the table. +# +# Parameters +# ---------- +# gaia_cat : astropy.table.Table +# contains the GAIA DR2 catalogue values from the GAIA archive +# +# gaia_2mass : astropy.table.Table +# contains 2MASS catalogue values from the GAIA DR2 archive +# +# gaia_2mass_crossref : astropy.table.Table +# contains GAIA/2MASS cross-references from the GAIA DR2 archive +# +# twomass_cat : astropy.table.Table +# contains the 2MASS catalogue values from IPAC +# +# Returns +# ------- +# ngaia2masscr : numpy.ndarray +# an integer array of cross match indexes, giving for +# each 2MASS source the index number of the associated +# GAIA source in the main GAIA table, or a value of -10 +# where there is no match +# """ +# ntable1 = len(gaia_cat['ra']) +# ntable2 = len(gaia_2mass['ra']) +# ntable3 = len(gaia_2mass_crossref['ra']) +# ntable4 = len(twomass_cat['ra']) +# ngaia2mass = np.zeros((ntable2), dtype=np.int16) - 10 +# ngaia2masscr = np.zeros((ntable4), dtype=np.int16) - 10 +# for loop in range(ntable2): +# # find the number of entries of each 2MASS source in the cross references +# nmatch = 0 +# namematch = [] +# match1 = [] +# for l1 in range(ntable3): +# if gaia_2mass['DESIGNATION'][loop] == gaia_2mass_crossref['DESIGNATION'][l1]: +# nmatch = nmatch + 1 +# namematch.append(gaia_2mass_crossref['designation'][l1]) +# match1.append(loop) +# # Find the matching GAIA sources and select the one with the best +# # magnitude match within a radius of 0.3 arc-seconds of the 2MASS +# # position. +# magkeys = ['j_m', 'h_m', 'ks_m'] +# if nmatch > 0: +# mindelm = 10000.0 +# ncross = -10 +# for l1 in range(nmatch): +# for l2 in range(ntable1): +# gmag = 0. 
+# irmag = -10000.0 +# if gaia_cat['DESIGNATION'][l2] == namematch[l1]: +# ra1 = gaia_cat['ra'][l2] +# dec1 = gaia_cat['dec'][l2] +# ra2 = gaia_2mass['ra'][match1[l1]] +# dec2 = gaia_2mass['dec'][match1[l1]] +# p1 = SkyCoord(ra1*u.deg, dec1*u.deg) +# p2 = SkyCoord(ra2*u.deg, dec2*u.deg) +# if p2.separation(p1).arcsec < 0.5: +# gmag = gaia_cat['phot_g_mean_mag'][l2] +# # select 2MASS magnitude: first ph_qual = A or if none +# # is of quality A the first ph_qual = B or if none is +# # of quality A or B then the first non U value. +# magval = gaia_2mass['ph_qual'][match1[l1]] +# if isinstance(magval, str): +# qual = magval[0:3] +# else: +# qual = magval.decode()[0:3] +# +# if (irmag < -100.): +# a_pos = qual.find('A') +# if a_pos != -1: +# irmag = gaia_2mass[magkeys[a_pos]][match1[l1]] +# else: +# b_pos = qual.find('B') +# if b_pos != -1: +# irmag = gaia_2mass[magkeys[b_pos]][match1[l1]] +# else: +# non_u_pos = re.search(r'[^U]', qual) +# if non_u_pos is not None: +# irmag = gaia_2mass[magkeys[non_u_pos.start()]][match1[l1]] +# +# delm = gmag - irmag +# if (delm > -1.2) and (delm < 30.0): +# if delm < mindelm: +# ncross = l2 +# mindelm = delm +# ngaia2mass[loop] = ncross +# # Now locate the 2MASS sources in the IPAC 2MASS table, and put in the +# # index values. +# for loop in range(ntable4): +# for n1 in range(ntable2): +# if twomass_cat['designation'][loop] == gaia_2mass['DESIGNATION'][n1]: +# ngaia2masscr[loop] = ngaia2mass[n1] +# return ngaia2masscr, ngaia2mass +# +# +# def wise_crossmatch(gaia_cat, gaia_wise, gaia_wise_crossref, wise_cat, twomass_cat): +# """ +# Relate the GAIA/WISE designations to the WISE catalogue designations, since the names +# change a little between the different catalogues. Return the boolean list of matches and +# the index values in wise_cat. 
+# +# Parameters +# ---------- +# gaia_cat : astropy.table.Table +# contains the GAIA DR2 catalogue values from the GAIA archive +# +# gaia_wise : astropy.table.Table +# contains WISE catalogue values from the GAIA DR2 archive +# +# gaia_wise_crossref : astropy.table.Table +# contains GAIA/WISE cross-references from the GAIA DR2 archive +# +# wise_cat : astropy.table.Table +# contains WISE data from IPAC in table form +# +# twomass_cat : astropy.table.Table +# contains 2MASS data from IPAC in table form +# +# gaia2masscr : list +# list of integers of the cross-reference indexes from 2MASS to GAIA +# +# Returns +# ------- +# matchwise : list +# boolean list of length equal to wise_cat with True if there is a +# cross-match with GAIA +# +# gaiawiseinds : list +# list of integer index values from wise_cat to gaia_cat (i.e. the +# GAIA number to which the WISE source corresponds) +# +# twomasswiseinds : list +# list of integer index values from wise_cat to twomass_cat (i.e. +# the 2MASS number to which the WISE source corresponds) +# """ +# num_entries = len(wise_cat['ra']) +# num_gaia = len(gaia_cat['ra']) +# matchwise = [False] * num_entries +# gaiawiseinds = [-1] * num_entries +# twomasswiseinds = [-1] * num_entries +# ra1 = np.copy(wise_cat['ra']) +# dec1 = np.copy(wise_cat['dec']) +# ra2 = np.copy(gaia_wise['ra']) +# dec2 = np.copy(gaia_wise['dec']) +# ra3 = np.copy(gaia_wise_crossref['ra']) +# dec3 = np.copy(gaia_wise_crossref['dec']) +# sc1 = SkyCoord(ra=ra1*u.degree, dec=dec1*u.degree) +# sc2 = SkyCoord(ra=ra2*u.degree, dec=dec2*u.degree) +# sc3 = SkyCoord(ra=ra3*u.degree, dec=dec3*u.degree) +# n2mass = len(twomass_cat['ra']) +# # look at the WISE data and find the sources with listed 2MASS counterparts +# for loop in range(num_entries): +# if not np.isnan(wise_cat['h_m_2mass'][loop]): +# for n1 in range(n2mass): +# if (abs(twomass_cat['j_m'][n1] - wise_cat['j_m_2mass'][loop]) < 0.001) and \ +# (abs(twomass_cat['h_m'][n1] - wise_cat['h_m_2mass'][loop]) < 0.001) 
and \ +# (abs(twomass_cat['k_m'][n1] - wise_cat['k_m_2mass'][loop]) < 0.001): +# twomasswiseinds[loop] = n1 +# break +# # match WISE to gaia_wise by position +# idx, d2d, d3d = sc3.match_to_catalog_sky(sc1) +# for loop in range(len(idx)): +# if (d2d[loop].arcsec) < 0.4: +# matchwise[idx[loop]] = True +# for n2 in range(num_gaia): +# if gaia_cat['DESIGNATION'][n2] == gaia_wise_crossref['DESIGNATION'][loop]: +# gaiawiseinds[idx[loop]] = n2 +# break +# return matchwise, gaiawiseinds, twomasswiseinds +# +# +# def interpolate_magnitudes(wl1, mag1, wl2, filternames): +# """ +# Given an input set of magnitudes and associated wavelengths, interpolate +# these in wavelength to get approximate JWST magnitudes. It is assumed that +# the first 3 magnitudes in the input vector mag1 are the GAIA BP, g, and +# RP filters and that the rest are long wavelength magnitudes. If only the +# GAIA g filter magnitude is available it is used for all output magnitudes. +# If only GAIA BP/g/RP magnitudes are available the GAIA BP/RP magnitudes +# are transformed to JWST magnitudes based on stellar model values. In the +# case where there is some infrared data, the numpy interp function is used +# to interpolate the magnitudes. In the inputs filters without data are +# assigned magnitude > 100, and these are not used in the interpolation. +# +# As is normal for numpy interp, wavelength values outside the defined +# range in the input magnitudes get nearest magnitude value. +# +# The input magnitudes must be A0V magnitudes not AB or ST magnitudes. +# +# Parameters +# ---------- +# wl1 : numpy.ndarray +# The pivot wavelengths, in microns, for the input filters. The +# values need to be sorted before passing to the routine. +# +# mag1 : numpy.ndarray +# The associated magnitudes (A0V by assumption). Values > 100. +# indicate "no data". 
+# +# wl2 : numpy.ndarray +# The pivot wavelengths, in microns, for the output filters +# +# filternames : list +# The names of the output filters, used when the GAIA blue/red +# magnitudes are available but no near-infrared magnitudes +# are available. +# +# Returns +# ------- +# out_magnitudes : numpy.ndarray +# The output interpolated magnitudes corresponding to the +# wavelengths. +# """ +# inds = np.isnan(mag1) +# mag1[inds] = 10000. +# nout = len(wl2) +# outmags = wl2*0.+10000. +# # Case 1: All dummy values, return all magnitudes = 10000.0 (this should +# # not happen) +# if np.min(mag1) > 100.: +# return outmags +# # Case 2, Only GAIA magnitudes. Either transform from the GAIA BP and RP +# # colour to the JWST magnitudes or assume a default colour value +# # for the star, equivalent to a star of type K4V. +# if np.min(mag1[3:]) > 100.: +# if (mag1[0] > 100.) or (mag1[2] > 100.): +# # Where the BP and RP magnitudes are not available, make colours +# # matching a K4V star (assumed T=4500, log(g)=5.0) +# inmags = np.zeros((2), dtype=np.float32) +# inmags[0] = mag1[1] + 0.5923 +# inmags[1] = mag1[1] - 0.7217 +# else: +# inmags = np.copy(mag1[[0, 2]]) +# +# standard_magnitudes, standard_values, standard_filters, standard_labels = read_standard_magnitudes() +# in_filters = ['GAIA gbp', 'GAIA grp'] +# newmags = match_model_magnitudes(inmags, in_filters, standard_magnitudes, standard_values, +# standard_filters, standard_labels) +# inds = crossmatch_filter_names(filternames, standard_filters) +# outmags = np.copy(newmags[inds]) +# return outmags +# # Case 3, some infrared magnitudes are available, interpolate good values +# # (magnitude = 10000 for bad values) +# inds = np.where(mag1 < 100.) +# inmags = mag1[inds] +# inwl = wl1[inds] +# outmags = np.interp(wl2, inwl, inmags) +# +# return outmags +# +# +# def add_filter_names(headerlist, filter_names, filter_labels, filters): +# """ +# Add a set of filter header labels (i.e. 
niriss_f090w_magnitude for example) +# to a list, by matching filter names. +# +# Parameters +# ---------- +# headerlist : list +# An existing (possibly empty) list to hold the header string for the +# output magnitudes +# +# filter_names : list +# The list of available filter names to match to +# +# filter_labels : list +# The corresponding list of filter labels +# +# filters : list +# The list of filter names to match, or an empty list or None to get +# all available filter labels +# +# Returns +# ------- +# headerlist : list +# The revised list of labels with the filter labels requested appended +# """ +# try: +# n1 = len(filters) +# except: +# n1 = 0 +# if (filters is None) or (n1 == 0): +# for loop in range(len(filter_labels)): +# headerlist.append(filter_labels[loop]) +# if n1 > 0: +# for loop in range(n1): +# for k in range(len(filter_names)): +# if filters[loop].lower() == filter_names[k].lower(): +# headerlist.append(filter_labels[k]) +# return headerlist +# +# +# def combine_catalogs(cat1, cat2, magnitude_fill_value=99., starting_index=1): +# """Combine two Mirage catalog objects. Catalogs must be of the same +# type (e.g. PointSourceCatalog), and have the same values for position +# units (RA, Dec or x, y) velocity units (arcsec/hour vs pixels/hour), +# and galaxy radius units (arcsec vs pixels), if present. This uses the +# astropy.table join method. A source common to both tables will be +# identified and combined into a single row. Sources that are not common +# to both input catalogs will have a placeholder value for magnitude +# columns where there is no data. 
+# +# Parameters +# ---------- +# cat1 : mirage.catalogs.catalog_generator.XXCatalog +# First catalog to be joined +# +# cat2 : mirage.catalogs.catalog_generator.XXCatalog +# Second catalog to be joined +# +# magnitude_fill_value : float +# Magnitude value to use for sources that are undefined in one catalog +# +# starting_index : int +# Index value to begin counting with inside the new catalog. These values +# will be placed in the 'index' column of the new catalog +# +# Returns +# ------- +# new_cat : mirage.catalogs.catalog_generator.XXCatalog +# Combined catalog +# """ +# # Be sure that location coordinate systems are the same between the +# # two catalogs +# # Make sure the catalogs are the same type +# if type(cat1) != type(cat2): +# raise TypeError("Catalogs are different types. Cannot be combined.") +# +# if cat1.location_units != cat2.location_units: +# raise ValueError('Coordinate mismatch in catalogs to combine.') +# +# # Join catalog tables. Set fill value for all magnitude columns +# combined = join(cat1.table, cat2.table, join_type='outer') +# mag_cols = [col for col in combined.colnames if 'magnitude' in col] +# for col in mag_cols: +# combined[col].fill_value = magnitude_fill_value +# combined = combined.filled() +# +# # NOTE that the order of this if/elif statement matters, as different +# # catalog classes inherit from each other. 
+# +# # --------------Moving Galaxies--------------------------------------- +# if isinstance(cat1, MovingSersicCatalog): +# if cat1.velocity_units != cat2.velocity_units: +# raise ValueError('Velocity unit mismatch in catalogs to combine.') +# if cat1.radius_units != cat2.radius_units: +# raise ValueError('Radius unit mismatch in catalogs to combine.') +# +# if cat1.location_units == 'position_RA_Dec': +# if cat1.velocity_units == 'velocity_RA_Dec': +# new_cat = MovingSersicCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# ra_velocity=combined['ra_velocity'].data, +# dec_velocity=combined['dec_velocity'].data, +# ellipticity=combined['ellipticity'].data, +# radius=combined['radius'].data, +# sersic_index=combined['sersic_index'].data, +# position_angle=combined['pos_angle'].data, +# radius_units=cat1.radius_units, +# starting_index=starting_index) +# else: +# new_cat = MovingSersicCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# x_velocity=combined['ra_velocity'].data, +# y_velocity=combined['dec_velocity'].data, +# ellipticity=combined['ellipticity'].data, +# radius=combined['radius'].data, +# sersic_index=combined['sersic_index'].data, +# position_angle=combined['pos_angle'].data, +# radius_units=cat1.radius_units, +# starting_index=starting_index) +# else: +# if cat1.velocity_units == 'velocity_RA_Dec': +# new_cat = MovingSersicCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# ra_velocity=combined['ra_velocity'].data, +# dec_velocity=combined['dec_velocity'].data, +# ellipticity=combined['ellipticity'].data, +# radius=combined['radius'].data, +# sersic_index=combined['sersic_index'].data, +# position_angle=combined['pos_angle'].data, +# radius_units=cat1.radius_units, +# starting_index=starting_index) +# else: +# new_cat = MovingSersicCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# x_velocity=combined['ra_velocity'].data, +# y_velocity=combined['dec_velocity'].data, +# 
ellipticity=combined['ellipticity'].data, +# radius=combined['radius'].data, +# sersic_index=combined['sersic_index'].data, +# position_angle=combined['pos_angle'].data, +# radius_units=cat1.radius_units, +# starting_index=starting_index) +# +# # --------------Moving Extended Sources------------------------------- +# elif isinstance(cat1, MovingExtendedCatalog): +# if cat1.velocity_units != cat2.velocity_units: +# raise ValueError('Velocity unit mismatch in catalogs to combine.') +# +# if cat1.location_units == 'position_RA_Dec': +# if cat1.velocity_units == 'velocity_RA_Dec': +# new_cat = MovingExtendedCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# ra_velocity=combined['ra_velocity'].data, +# dec_velocity=combined['dec_velocity'].data, +# filenames=combined['filename'].data, +# position_angle=combined['pos_angle'].data, +# starting_index=starting_index) +# else: +# new_cat = MovingExtendedCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# x_velocity=combined['ra_velocity'].data, +# y_velocity=combined['dec_velocity'].data, +# filenames=combined['filename'].data, +# position_angle=combined['pos_angle'].data, +# starting_index=starting_index) +# else: +# if cat1.velocity_units == 'velocity_RA_Dec': +# new_cat = MovingExtendedCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# ra_velocity=combined['ra_velocity'].data, +# dec_velocity=combined['dec_velocity'].data, +# filenames=combined['filename'].data, +# position_angle=combined['pos_angle'].data, +# starting_index=starting_index) +# else: +# new_cat = MovingExtendedCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# x_velocity=combined['ra_velocity'].data, +# y_velocity=combined['dec_velocity'].data, +# filenames=combined['filename'].data, +# position_angle=combined['pos_angle'].data, +# starting_index=starting_index) +# +# # --------------------Galaxies------------------------------------ +# elif isinstance(cat1, 
GalaxyCatalog): +# if cat1.radius_units != cat2.radius_units: +# raise ValueError('Radius unit mismatch in catalogs to combine.') +# +# if cat1.location_units == 'position_RA_Dec': +# new_cat = GalaxyCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# ellipticity=combined['ellipticity'].data, +# radius=combined['radius'].data, +# sersic_index=combined['sersic_index'].data, +# position_angle=combined['pos_angle'].data, +# radius_units=cat1.radius_units, +# starting_index=starting_index) +# else: +# new_cat = GalaxyCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# ellipticity=combined['ellipticity'].data, +# radius=combined['radius'].data, +# sersic_index=combined['sersic_index'].data, +# position_angle=combined['pos_angle'].data, +# radius_units=cat1.radius_units, +# starting_index=starting_index) +# +# # ------------------Extended Sources------------------------------- +# elif isinstance(cat1, ExtendedCatalog): +# if cat1.location_units == 'position_RA_Dec': +# new_cat = ExtendedCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# filenames=combined['filename'].data, +# position_angle=combined['pos_angle'].data, +# starting_index=starting_index) +# else: +# new_cat = ExtendedCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# filenames=combined['filename'].data, +# position_angle=combined['pos_angle'].data, +# starting_index=starting_index) +# +# # -------------Moving Point Sources-------------------------------- +# elif isinstance(cat1, MovingPointSourceCatalog): +# if cat1.velocity_units != cat2.velocity_units: +# raise ValueError('Velocity unit mismatch in catalogs to combine.') +# +# if cat1.location_units == 'position_RA_Dec': +# if cat1.velocity_units == 'velocity_RA_Dec': +# new_cat = MovingPointSourceCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# ra_velocity=combined['ra_velocity'].data, +# dec_velocity=combined['dec_velocity'].data, +# 
starting_index=starting_index) +# else: +# new_cat = MovingPointSourceCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# x_velocity=combined['ra_velocity'].data, +# y_velocity=combined['dec_velocity'].data, +# starting_index=starting_index) +# else: +# if cat1.location_units == 'velocity_RA_Dec': +# new_cat = MovingPointSourceCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# ra_velocity=combined['ra_velocity'].data, +# dec_velocity=combined['dec_velocity'].data, +# starting_index=starting_index) +# else: +# new_cat = MovingPointSourceCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# x_velocity=combined['ra_velocity'].data, +# y_velocity=combined['dec_velocity'].data, +# starting_index=starting_index) +# +# # --------------------Point Sources------------------------------- +# elif isinstance(cat1, PointSourceCatalog): +# # Create new catalog object and populate +# if cat1.location_units == 'position_RA_Dec': +# new_cat = PointSourceCatalog(ra=combined['x_or_RA'].data, +# dec=combined['y_or_Dec'].data, +# starting_index=starting_index) +# else: +# new_cat = PointSourceCatalog(x=combined['x_or_RA'].data, +# y=combined['y_or_Dec'].data, +# starting_index=starting_index) +# +# +# # -------------Add magnitude columns------------------------------- +# mag_cols = [colname for colname in combined.colnames if 'magnitude' in colname] +# cat1_mag_cols = [colname for colname in cat1.magnitudes.keys() if 'magnitude' in colname] +# for col in mag_cols: +# new_cat.add_magnitude_column(combined[col].data, column_name=col, +# magnitude_system=cat1.magnitudes[cat1_mag_cols[0]][0]) +# +# return new_cat +# +# +# def combine_catalogs_v0(observed_jwst, besancon_jwst): +# """ +# Replaced by combine_catalogs. Do we need to keep this? +# This code takes two input PointSourceCatalog objects and returns the +# combined PointSourceCatalog. 
The two catalogs have to be in the same +# magnitude units and have the same set of filters. +# Input values: +# observed_jwst: (mirage.catalogs.catalog_generator.PointSourceCatalog) +# Catalog object one +# besancon_jwst: (mirage.catalogs.catalog_generator.PointSourceCatalog) +# Catalog object two +# Return value: +# outcat: (mirage.catalogs.catalog_generator.PointSourceCatalog) +# A new catalog object combining the two input catalogs +# """ +# logger = logging.getLogger('mirage.catalogs.create_catalog.combine_catalogs_v0') +# +# keys1 = list(observed_jwst.magnitudes.keys()) +# keys2 = list(besancon_jwst.magnitudes.keys()) +# besanconinds = [] +# for key in keys1: +# for loop in range(len(keys2)): +# if key == keys2[loop]: +# besanconinds.append(loop) +# if len(keys1) != len(besanconinds): +# logger.warning('Magnitude mismatch in catalogs to combine. Will return None.') +# return None +# if observed_jwst.location_units != besancon_jwst.location_units: +# logger.warning('Coordinate mismatch in catalogs to combine. 
Will return None.')
+#         return None
+#     ra1 = observed_jwst.ra
+#     dec1 = observed_jwst.dec
+#     ra2 = besancon_jwst.ra
+#     dec2 = besancon_jwst.dec
+#     raout = np.concatenate((ra1, ra2))
+#     decout = np.concatenate((dec1, dec2))
+#
+#     outcat = PointSourceCatalog(ra=raout, dec=decout)
+#     #outcat.location_units = observed_jwst.location_units
+#     for key in keys1:
+#         mag1 = observed_jwst.magnitudes[key][1]
+#         mag2 = besancon_jwst.magnitudes[key][1]
+#         magout = np.concatenate((mag1, mag2))
+#         values = key.split('_')
+#         instrument = values[0]
+#         filter = values[1]
+#         outcat.add_magnitude_column(magout, magnitude_system=observed_jwst.magnitudes[key][0],
+#                                     instrument=instrument, filter_name=filter)
+#     return outcat
+
+
+# def get_gaia_ptsrc_catalog(ra, dec, box_width):
+#     """Wrapper around Gaia query and creation of mirage-formatted catalog"""
+#     gaia_cat, gaia_mag_cols, gaia_2mass, gaia_2mass_crossref, gaia_wise, \
+#         gaia_wise_crossref = query_GAIA_ptsrc_catalog(ra, dec, box_width)
+#     gaia_mirage = mirage_ptsrc_catalog_from_table(gaia_cat, 'gaia', gaia_mag_cols)
+#     return gaia_mirage, gaia_cat, gaia_2mass_crossref, gaia_wise_crossref
+
+
+def query_GAIA_ptsrc_catalog(ra, dec, box_width):
+    """
+    This code is adapted from gaia_crossreference.py by Johannes Sahlmann. It
+    queries the GAIA DR2 archive for sources within a given square region of
+    the sky and returns the catalogue along with the 2MASS and WISE
+    cross-references for use in combining the catalogues to get the infrared
+    magnitudes for the sources that are detected in the other telescopes.
+
+    The GAIA main table has all the GAIA values, but the other tables have only
+    specific subsets of the data values to save space. In the "crossref"
+    tables the 'designation' is the GAIA name while 'designation_2' is the
+    2MASS or WISE name.
+ + Parameters + ---------- + ra : float + Right ascension of the target field in degrees + + dec : float + Declination of the target field in degrees + + box_width : float + Width of the (square) sky area, in arc-seconds + + Returns + ------- + gaia_cat : astropy.table.Table + The gaia DR2 magnitudes and other data + + gaia_mag_cols : list + List of the GAIA magnitude column names + + gaia_2mass : astropy.table.Table + The 2MASS values as returned from the GAIA archive + + gaia_2mass_crossref : astropy.table.Table + The cross-reference list with 2MASS sources + + gaia_wise : astropy.table.Table + The WISE values as returned from the GAIA archive + + gaia_wise_crossref : astropy.table.Table + The cross-reference list with WISE sources + """ + logger = logging.getLogger('mirage.catalogs.create_catalog.query_GAIA_ptsrc_catalog') + + data = OrderedDict() + data['gaia'] = OrderedDict() + data['tmass'] = OrderedDict() + data['wise'] = OrderedDict() + data['tmass_crossmatch'] = OrderedDict() + data['wise_crossmatch'] = OrderedDict() + # convert box width to degrees for the GAIA query + boxwidth = box_width/3600. 
+ data['gaia']['query'] = """SELECT * FROM gaiadr2.gaia_source AS gaia + WHERE 1=CONTAINS(POINT('ICRS',gaia.ra,gaia.dec), BOX('ICRS',{}, {}, {}, {})) + """.format(ra, dec, boxwidth, boxwidth) + + data['tmass']['query'] = """SELECT ra,dec,ph_qual,j_m,h_m,ks_m,designation FROM gaiadr1.tmass_original_valid AS tmass + WHERE 1=CONTAINS(POINT('ICRS',tmass.ra,tmass.dec), BOX('ICRS',{}, {}, {}, {})) + """.format(ra, dec, boxwidth, boxwidth) + + data['tmass_crossmatch']['query'] = """SELECT field.ra,field.dec,field.designation,tmass.designation from + (SELECT gaia.* + FROM gaiadr2.gaia_source AS gaia + WHERE 1=CONTAINS(POINT('ICRS',gaia.ra,gaia.dec), BOX('ICRS',{}, {}, {}, {}))) + AS field + INNER JOIN gaiadr2.tmass_best_neighbour AS xmatch + ON field.source_id = xmatch.source_id + INNER JOIN gaiadr1.tmass_original_valid AS tmass + ON tmass.tmass_oid = xmatch.tmass_oid + """.format(ra, dec, boxwidth, boxwidth) + + data['wise']['query'] = """SELECT ra,dec,ph_qual,w1mpro,w2mpro,w3mpro,w4mpro,designation FROM gaiadr1.allwise_original_valid AS wise + WHERE 1=CONTAINS(POINT('ICRS',wise.ra,wise.dec), BOX('ICRS',{}, {}, {}, {})) + """.format(ra, dec, boxwidth, boxwidth) + + data['wise_crossmatch']['query'] = """SELECT field.ra,field.dec,field.designation,allwise.designation from + (SELECT gaia.* + FROM gaiadr2.gaia_source AS gaia + WHERE 1=CONTAINS(POINT('ICRS',gaia.ra,gaia.dec), BOX('ICRS',{}, {}, {}, {}))) + AS field + INNER JOIN gaiadr2.allwise_best_neighbour AS xmatch + ON field.source_id = xmatch.source_id + INNER JOIN gaiadr1.allwise_original_valid AS allwise + ON allwise.designation = xmatch.original_ext_source_id + """.format(ra, dec, boxwidth, boxwidth) + + outvalues = {} + logger.info('Searching the GAIA DR2 catalog') + for key in data.keys(): + job = Gaia.launch_job(data[key]['query'], dump_to_file=False) + table = job.get_results() + outvalues[key] = table + logger.info('Retrieved {} sources for catalog {}'.format(len(table), key)) + + gaia_mag_cols = 
['phot_g_mean_mag', 'phot_bp_mean_mag', 'phot_rp_mean_mag'] + return outvalues['gaia'], gaia_mag_cols, outvalues['tmass'], outvalues['tmass_crossmatch'], outvalues['wise'], outvalues['wise_crossmatch'] + + +# def besancon(ra, dec, box_width, username='', kmag_limits=(13, 29)): +# """ +# This routine calls a server to get a Besancon star count model over a given +# small sky area at a defined position. For documentation of the Besancon +# model see the web site: http://model.obs-besancon.fr/ +# The star count model returns V, J, H, K, and L magnitudes plus other +# information. Here the magnitudes and the extinction are of most interest +# in producing the simulated NIRISS/NIRCam/Guider magnitudes. +# Note that the Besancon model uses a simplified exinction model, and so +# this is carried over into the Mirage point source lists. +# An email address is required for the besancon call. The KLEH value of +# 2 in the call to the model means that the input coordinates are +# RA, Dec in units of degrees. +# +# Parameters +# ---------- +# ra : float +# Right ascension or galactic longitude for the sky +# position where the model is to be calculated, in degrees. +# +# dec : float +# Declinatinon or galactic latitude for the sky +# position where the model is to be calculated, in degrees. +# +# box_width : float +# Size of the (square) sky area to be simulated, in arc-seconds +# +# username : str +# Username associated with the Besanson model website (listed +# in the docstring above) that will be used to submit the query. +# The website will prompt for a password before creating the +# query +# +# kmag_limits : tup +# The range of allowable K magnitudes for the model, given +# as a tuple (min, max). The default is (13,29). The +# bright limit will generally be set by 2MASS completeness +# limit. The 2MASS faint limit for any given sky position +# is roughly magnitude 15 to 16 in general. 
As the 2MASS +# completeness limit varies with position and the +# completeness limit is above the faint limit the Besancon +# bright limit is taken as magnitude 14 by default. Note +# that for the JWST instruments the 2MASS sources will +# saturate in full frame imaging in many cases. +# """ +# from astropy import units as u +# +# logger = logging.getLogger('mirage.catalogs.create_catalog.besancon') +# +# # Specified coordinates. Will need to convert to galactic long and lat +# # when calling model +# ra = ra * u.deg +# dec = dec * u.deg +# box_width = box_width * u.arcsec +# +# min_ra = ra - box_width / 2 +# max_ra = ra + box_width / 2 +# min_dec = dec - box_width / 2 +# max_dec = dec + box_width / 2 +# +# # Define the list of colors to return +# colors = 'V-K,J-K,J-H,J-L' +# +# # Band minimum and maximum values correspond to the 9 Johnson-Cousins +# # filters used by the Besancon model: V, B, U, R, I, J, H, K, L +# # in that order. +# band_min = '{},-99.0,-99.0,-99.0,-99.0,-99.0,-99.0,-99.0,-99.0'.format(kmag_limits[0]) +# band_max = '{},99.0,99.0,99.0,99.0,99.0,99.0,99.0,99.0'.format(kmag_limits[1]) +# +# # Query the model +# path = os.path.dirname(__file__) +# client = os.path.join(path, 'galmod_client.py') +# command = (('python {} --url "https://model.obs-besancon.fr/ws/" --user {} ' +# '--create -p KLEH 2 -p Coor1_min {} -p Coor2_min {} -p Coor1_max {} -p Coor2_max {} ' +# '-p ref_filter K -p acol {} -p band_min {} -p band_max {} --run') +# .format(client, username, min_ra.value, min_dec.value, max_ra.value, max_dec.value, +# colors, band_min, band_max)) +# logger.info('Running command: \n {}'.format(command)) +# os.system(command) +# +# +# def crop_besancon(ra, dec, box_width, catalog_file, ra_column_name='RAJ2000', dec_column_name='DECJ2000'): +# """Given a file containing an ascii source catalog, this function +# will crop the catalog such that it contains only sources within the +# provided RA/Dec range. 
+ +# Parameters +# ---------- +# ra : float +# Right Ascension, in degrees, of the center of the area to keep +# +# dec : float +# Declination, in degrees, of the center of the area to keep +# +# box_width : float +# Full width, in arcseconds, of the area to be kept. We assume a +# square box. +# +# catalog_file : str +# Name of ascii file containing catalog +# +# ra_column_name : str +# Name of the column in the catalog containing the RA values +# +# dec_column_name : str +# Name of the column in the catalog containing the Dec values +# +# Returns +# ------- +# cropped_catalog : astropy.table.Table +# Table containing cropped catalog +# """ +# # Define min and max RA, Dec values to be kept +# ra = ra * u.deg +# dec = dec * u.deg +# box_width = box_width * u.arcsec +# +# min_ra = ra - box_width / 2 +# max_ra = ra + box_width / 2 +# min_dec = dec - box_width / 2 +# max_dec = dec + box_width / 2 +# +# # Read in input catalog +# catalog = ascii.read(catalog_file) +# +# # Check for requested columns +# if ra_column_name not in catalog.colnames: +# raise ValueError(('ERROR: {} does not contain a {} column.').format(catalog_file, ra_column_name)) +# +# if dec_column_name not in catalog.colnames: +# raise ValueError(('ERROR: {} does not contain a {} column.').format(catalog_file, dec_column_name)) +# +# # Keep only sources within the range of RA and Dec +# good = np.where((catalog[ra_column_name].data >= min_ra) & (catalog[ra_column_name].data <= max_ra) & +# (catalog[dec_column_name].data >= min_dec) & (catalog[dec_column_name].data <= max_dec))[0] +# +# cropped_catalog = Table() +# for colname in catalog.colnames: +# cropped_catalog[colname] = catalog[colname][good] +# +# return cropped_catalog +# +# +# def galactic_plane(box_width, instrument, filter_list, besancon_catalog_file, +# ra_column_name='RAJ2000', dec_column_name='DECJ2000', starting_index=1, +# wise_catalog='allwise'): +# """Convenience function to create a typical scene looking into the disk of +# the 
Milky Way, using the besancon function. +# +# Parameters +# ---------- +# box_width : float +# Size of the (square) sky area to be simulated, in arc-seconds +# +# instrument : str +# Name of instrument for the siumulation +# +# filter_list : list +# List of filters to use to generate the catalog. (e.g ['F480M', 'F444W']) +# +# besancon_catalog_file : str +# Name of ascii catalog containing background stars. The code was +# developed around this being a catalog output by the Besaoncon +# model (via ``besancon()``), but it can be any ascii catalog +# of sources as long as it contains the columns specified in +# the ``catalog`` parameter of ``johnson_catalog_to_mirage_catalog()`` +# If None, the Besancon step will be skipped and a catalog will be +# built using only GAIA/2MASS/WISE +# +# ra_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# right ascension of the sources. +# +# dec_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# declination of the sources +# +# starting_index : int +# Beginning value to use for the 'index' column of the catalog. Default is 1. +# +# wise_catalog : str +# Switch that specifies the WISE catalog to be searched. Possible values are +# 'allwise' (default), which searches the ALLWISE source catalog, or +# 'wise_all_sky', which searches the older WISE All-Sky catalog. 
+# +# Returns +# ------- +# cat : mirage.catalogs.create_catalog.PointSourceCatalog +# Catalog with representative stars pulled from Besancon model +# """ +# galactic_longitude = 45.0 * u.deg # deg +# galactic_latitude = 0.0 * u.deg # deg +# coord = SkyCoord(galactic_longitude, galactic_latitude, frame=Galactic) +# +# cat, column_filter_list = get_all_catalogs(coord.icrs.ra.value, coord.icrs.dec.value, box_width, +# besancon_catalog_file=besancon_catalog_file, instrument=instrument, +# filters=filter_list, ra_column_name=ra_column_name, +# dec_column_name=dec_column_name, starting_index=starting_index, +# wise_catalog=wise_catalog) +# return cat +# +# +# def out_of_galactic_plane(box_width, instrument, filter_list, besancon_catalog_file, +# ra_column_name='RAJ2000', dec_column_name='DECJ2000', starting_index=1, +# wise_catalog='allwise'): +# """Convenience function to create typical scene looking out of the plane of +# the Milky Way by querying the Besancon model +# +# Parameters +# ---------- +# box_width : float +# Size of the (square) sky area to be simulated, in arc-seconds +# +# instrument : str +# Name of instrument for the simulation +# +# filter_list : list +# List of filters to use to generate the catalog. (e.g ['F480M', 'F444W']) +# +# besancon_catalog_file : str +# Name of ascii catalog containing background stars. The code was +# developed around this being a catalog output by the Besancon +# model (via ``besancon()``), but it can be any ascii catalog +# of sources as long as it contains the columns specified in +# the ``catalog`` parameter of ``johnson_catalog_to_mirage_catalog()`` +# If None, the Besancon step will be skipped and a catalog will be +# built using only GAIA/2MASS/WISE +# +# ra_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# right ascension of the sources. 
+# +# dec_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# declination of the sources +# +# starting_index : int +# Beginning value to use for the 'index' column of the catalog. Default is 1. +# +# wise_catalog : str +# Switch that specifies the WISE catalog to be searched. Possible values are +# 'allwise' (default), which searches the ALLWISE source catalog, or +# 'wise_all_sky', which searches the older WISE All-Sky catalog. +# +# Returns +# ------- +# cat : mirage.catalogs.create_catalog.PointSourceCatalog +# Catalog with representative stars pulled from Besancon model +# """ +# galactic_longitude = 45.0 * u.deg # deg +# galactic_latitude = 85.0 * u.deg # deg +# coord = SkyCoord(galactic_longitude, galactic_latitude, frame=Galactic) +# +# cat, column_filter_list = get_all_catalogs(coord.icrs.ra.value, coord.icrs.dec.value, box_width, +# besancon_catalog_file=besancon_catalog_file, instrument=instrument, +# filters=filter_list, ra_column_name=ra_column_name, +# dec_column_name=dec_column_name, starting_index=starting_index, +# wise_catalog=wise_catalog) +# return cat +# +# +# def galactic_bulge(box_width, instrument, filter_list, besancon_catalog_file, +# ra_column_name='RAJ2000', dec_column_name='DECJ2000', starting_index=1, wise_catalog='allwise'): +# """Convenience function to create typical scene looking into bulge of +# the Milky Way +# +# Parameters +# ---------- +# box_width : float +# Size of the (square) sky area to be simulated, in arc-seconds +# +# instrument : str +# Name of instrument for the siumulation +# +# filter_list : list +# List of filters to use to generate the catalog. (e.g ['F480M', 'F444W']) +# +# besancon_catalog_file : str +# Name of ascii catalog containing background stars. 
The code was +# developed around this being a catalog output by the Besaoncon +# model (via ``besancon()``), but it can be any ascii catalog +# of sources as long as it contains the columns specified in +# the ``catalog`` parameter of ``johnson_catalog_to_mirage_catalog()`` +# If None, the Besancon step will be skipped and a catalog will be +# built using only GAIA/2MASS/WISE +# +# ra_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# right ascension of the sources. +# +# dec_column_name : str +# Name of the column within ``besancon_catalog_file`` containing the +# declination of the sources +# +# starting_index : int +# Beginning value to use for the 'index' column of the catalog. Default is 1. +# +# wise_catalog : str +# Switch that specifies the WISE catalog to be searched. Possible values are +# 'allwise' (default), which searches the ALLWISE source catalog, or +# 'wise_all_sky', which searches the older WISE All-Sky catalog. +# +# Returns +# ------- +# cat : mirage.catalogs.create_catalog.PointSourceCatalog +# Catalog with representative stars pulled from Besancon model +# """ +# galactic_longitude = 0.0 * u.deg # deg +# galactic_latitude = 5.0 * u.deg # deg +# coord = SkyCoord(galactic_longitude, galactic_latitude, frame=Galactic) +# +# cat, column_filter_list = get_all_catalogs(coord.icrs.ra.value, coord.icrs.dec.value, box_width, +# besancon_catalog_file=besancon_catalog_file, instrument=instrument, +# filters=filter_list, ra_column_name=ra_column_name, +# dec_column_name=dec_column_name, starting_index=starting_index, +# wise_catalog=wise_catalog) +# return cat +# +# +# def filter_bad_ra_dec(table_data): +# """Remove targets with bad RA and/or Dec values from the input table. +# Use the column masks to find which entries have bad RA or Dec values. +# These will be excluded from the Mirage catalog. +# +# Parameters +# ---------- +# table_data : astropy.table.Table +# Input table from e.g. 
2MASS query +# +# Returns +# ------- +# position_mask : np.ndarray +# 1D boolean array. True for good sources, False for bad. +# """ +# ra_mask = ~table_data['ra'].data.mask +# dec_mask = ~table_data['dec'].data.mask +# position_mask = ra_mask & dec_mask +# return position_mask +# +# +# def generate_ra_dec(number_of_stars, ra_min, ra_max, dec_min, dec_max, seed=None): +# """ +# Generate a list of random RA, Dec values in a square region. Note that +# this assumes a small sky area so that the change in the sky area per +# a degree of right ascension is negligible. This routine will break down +# at the north or south celestial poles. +# The np uniform random number generator is used to make the source +# positions. +# +# Parameters +# ---------- +# number_of_stars: int +# The number of stars to generate positions for +# +# ra_min : float +# The minimum RA value of the area, in degrees +# +# ra_max : float +# The maximum RA value of the area, in degrees +# +# dec_min : float +# The minimum Dec value of the area, in degrees +# +# dec_max : float +# The maximum Dec value of the area, in degrees +# +# seed : int +# Optional seed for random number generator +# +# Returns +# ------- +# ra_list : numpy.ndarray +# The list of output RA values in degrees. +# +# dec_list : numpy.ndarray +# The list of output Dec values in degrees. 
+# """ +# delta_ra = ra_max - ra_min +# delta_dec = dec_max - dec_min +# +# # If a seed is provided, seed the generator +# if seed is not None: +# np.random.seed(seed) +# +# # Generate random numbers +# numbers = np.random.random(2 * number_of_stars) +# +# # Create RA and Dec values +# ra_list = numbers[0: number_of_stars] * delta_ra + ra_min +# dec_list = numbers[number_of_stars:] * delta_dec + dec_min +# return ra_list, dec_list +# +# +# def galaxy_background(ra0, dec0, v3rotangle, box_width, instrument, filters, +# boxflag=True, brightlimit=14.0, seed=None, starting_index=1): +# """ +# Given a sky position (ra0,dec0), and a V3 rotation angle (v3rotangle) this +# routine makes a fake galaxy background for a square region of sky or a circle +# provided that the area is smaller than the GODDS-S field. The fake galaxies +# are randomly distributed over the area and have random changes to the input +# Sersic parameters, so the field is different at every call. +# +# Parameters +# ---------- +# ra0 : float +# The sky position right ascension in decimal degrees +# +# dec0 : float +# The sky position declimation in decimal degrees +# +# v3rotangle : float +# The v3 rotation angle of the y coordinate in the image, in +# degrees E of N +# +# box_width : float +# The width of the square on the sky in arc-seconds, or the +# radius of the circle on the sky in arc-seconds; must be a +# value larger and 1.0 and smaller than 779.04 for a square +# or 439.52 for a circle. +# +# boxflag : boolean +# Flag for whether the region is a square (if True) or a +# circle (if False). +# +# instrument : str +# One of "All", "NIRCam", "NIRISS", or "Guider". +# +# filters : list +# A list of the required filters. If set to None or an empty +# list all filters are used. +# +# brightlimit : float +# The bright limit AB magnitude for the galaxies in the output +# catalogue, applied in the F200W filter (close to K-band). 
+# +# seed : integer or None +# If a value is given, it is used to seed the random number +# generator so that a scene can be reproduced. If the value +# is None the seed is set randomly. If a non-integer value is +# given it is converted to integer. +# +# starting_index : int +# Beginning value to use for the 'index' column of the catalog. Default is 1. +# +# Returns +# ------- +# galaxy_cat : mirage.catalogs.catalog_generate.GalaxyCatalog +# This is the output list of galaxy properties. If there is +# an error, None is returned. +# +# seedvalue : integer +# The seed value used with numpy.random to generate the values. +# """ +# logger = logging.getLogger('mirage.catalogs.create_catalog.galaxy_background') +# +# # The following is the area of the GOODS-S field catalogue from Gabe Brammer +# # in square arc-seconds +# goodss_area = 606909. +# if boxflag: +# outarea = box_width*box_width +# else: +# outarea = math.pi*box_width*box_width +# if outarea >= goodss_area: +# logger.error('Error: requested sky area is too large. Values will not be produced.') +# return None, None +# if seed is None: +# seedvalue = int(950397468.*np.random.random()) +# else: +# if not isinstance(seed, int): +# seedvalue = int(abs(seed)) +# else: +# seedvalue = seed +# np.random.seed(seedvalue) +# threshold = outarea/goodss_area +# +# # Standardize the input filter names +# std_filters = standardize_filters(instrument, filters) +# filter_names = make_mag_column_names(instrument, std_filters) +# nfilters = len(filter_names) +# if nfilters < 1: +# logger.error('Error matching filters to standard list. Inputs are:') +# logger.error('Instrument: {}'.format(instrument)) +# logger.error('Filter names: {}'.format(filters)) +# return None, None +# # add 8 to these indexes to get the columns in the GODDS-S catalogue file +# # +# # Note: NIRCam filters are used in proxy for the NIRISS long wavelength +# # filters. The broad NIRCam F150W2 filter is used as a proxy for the +# # Guiders. 
+# filterinds = {'niriss_f090w_magnitude': 0, 'niriss_f115w_magnitude': 1, +# 'niriss_f150w_magnitude': 2, 'niriss_f200w_magnitude': 3, +# 'niriss_f140m_magnitude': 4, 'niriss_f158m_magnitude': 5, +# 'nircam_f070w_clear_magnitude': 6, 'nircam_f090w_clear_magnitude': 7, +# 'nircam_f115w_clear_magnitude': 8, 'nircam_f150w_clear_magnitude': 9, +# 'nircam_f200w_clear_magnitude': 10, 'nircam_f150w2_clear_magnitude': 11, +# 'nircam_f140m_clear_magnitude': 12, 'nircam_f150w2_f162m_magnitude': 13, +# 'nircam_f182m_clear_magnitude': 14, 'nircam_f210m_clear_magnitude': 15, +# 'nircam_f150w2_f164n_magnitude': 16, 'nircam_f187n_clear_magnitude': 17, +# 'nircam_f212n_clear_magnitude': 18, 'nircam_f277w_clear_magnitude': 19, +# 'nircam_f356w_clear_magnitude': 20, 'nircam_f444w_clear_magnitude': 21, +# 'nircam_f322w2_clear_magnitude': 22, 'nircam_f250m_clear_magnitude': 23, +# 'nircam_f300m_clear_magnitude': 24, 'nircam_f335m_clear_magnitude': 25, +# 'nircam_f360m_clear_magnitude': 26, 'nircam_f410m_clear_magnitude': 27, +# 'nircam_f430m_clear_magnitude': 28, 'nircam_f460m_clear_magnitude': 29, +# 'nircam_f480m_clear_magnitude': 30, 'nircam_f322w2_f323n_magnitude': 31, +# 'nircam_f444w_f405n_magnitude': 32, 'nircam_f444w_f466n_magnitude': 33, +# 'nircam_f444w_f470n_magnitude': 34, 'niriss_f277w_magnitude': 19, +# 'niriss_f356w_magnitude': 20, 'niriss_f380m_magnitude': 26, +# 'niriss_f430m_magnitude': 28, 'niriss_f444w_magnitude': 21, +# 'niriss_f480m_magnitude': 30, 'fgs_guider1_magnitude': 11, +# 'fgs_guider2_magnitude': 11} +# module_path = pkg_resources.resource_filename('mirage', '') +# catalog_file = os.path.join(module_path, 'config/goodss_3dhst.v4.1.jwst_galfit.cat') +# catalog_values = np.loadtxt(catalog_file, comments='#') +# +# # Force positive values for radius, sersic index, and ellipticity +# good = ((catalog_values[:, 59] >= 0.) & (catalog_values[:, 61] > 0.) 
& (catalog_values[:, 63] >= 0.)) +# catalog_values = catalog_values[good, :] +# +# outinds = np.zeros((nfilters), dtype=np.int16) +# try: +# loop = 0 +# for filter in filter_names: +# outinds[loop] = filterinds[filter] + 8 +# loop = loop+1 +# except: +# logger.error('Error matching filter %s to those available in 3D-HST catalog.' % (filter)) +# return None, None +# # The following variables hold the Sersic profile index values +# # (radius [arc-seconds], sersic index, ellipticity, position angle) +# # and the assocated uncertaintie index values +# sersicinds = [59, 61, 63, 65] +# sersicerrorinds = [60, 62, 64, 66] +# ncat = catalog_values.shape[0] +# select = np.random.random(ncat) +# magselect = np.copy(catalog_values[:, filterinds['niriss_f200w_magnitude']+8]) +# outputinds = [] +# for loop in range(ncat): +# if (magselect[loop] >= brightlimit) and (select[loop] < threshold): +# outputinds.append(loop) +# nout = len(outputinds) +# if boxflag: +# delx0 = (-0.5+np.random.random(nout))*box_width/3600. +# dely0 = (-0.5+np.random.random(nout))*box_width/3600. +# radius = np.sqrt(delx0*delx0+dely0*dely0) +# angle = np.arctan2(delx0, dely0)+v3rotangle*math.pi/180. +# else: +# radius = box_width*np.sqrt(np.random.random(nout))/3600. +# angle = 2.*math.pi*np.random.random(nout) +# delx = radius*np.cos(angle) +# dely = radius*np.sin(angle) +# raout = delx * 0. +# decout = dely * 0. +# for loop in range(len(delx)): +# raout[loop], decout[loop] = deproject_from_tangent_plane(delx[loop], dely[loop], ra0, dec0) +# while raout[loop] < 0.: +# raout[loop] += 360. +# rot1 = 360.*np.random.random(nout)-180. 
+# rout = np.copy(catalog_values[outputinds, sersicinds[0]]) +# drout = np.copy(catalog_values[outputinds, sersicerrorinds[0]]) +# rout = rout+2.*drout*np.random.normal(0., 1., nout) +# rout[rout < 0.01] = 0.01 +# max_rad = np.max(catalog_values[:, sersicinds[0]]) +# rout[rout > max_rad] = max_rad +# elout = np.copy(catalog_values[outputinds, sersicinds[2]]) +# delout = np.copy(catalog_values[outputinds, sersicerrorinds[2]]) +# elout = elout+delout*np.random.normal(0., 1., nout) +# elout[elout > 0.98] = 0.98 +# elout[elout < 0.] = 0.0 +# sindout = np.copy(catalog_values[outputinds, sersicinds[1]]) +# dsindout = np.copy(catalog_values[outputinds, sersicinds[1]]) +# sindout = sindout+dsindout*np.random.normal(0., 1., nout) +# sindout[sindout < 0.1] = 0.1 +# max_sersic = np.max(catalog_values[:, sersicinds[1]]) +# sindout[sindout > max_sersic] = max_sersic +# paout = np.copy(catalog_values[outputinds, sersicinds[3]]) +# dpaout = np.copy(catalog_values[outputinds, sersicinds[3]]) +# paout = paout+dpaout*np.random.normal(0., 1., nout) +# for loop in range(len(paout)): +# if paout[loop] < -180.: +# paout[loop] = paout[loop]+360. +# if paout[loop] > 180.: +# paout[loop] = paout[loop]-360. +# galaxy_cat = GalaxyCatalog(ra=raout, dec=decout, ellipticity=elout, +# radius=rout, sersic_index=sindout, +# position_angle=paout, radius_units='arcsec', +# starting_index=starting_index) +# for loop in range(len(filter_names)): +# mag1 = catalog_values[outputinds, outinds[loop]] +# dmag1 = -0.2*np.random.random(nout)+0.1 +# mag1 = mag1 + dmag1 +# +# galaxy_cat.add_magnitude_column(mag1, column_name=filter_names[loop], +# magnitude_system='abmag') +# return galaxy_cat, seedvalue diff --git a/exoctk/contam_visibility/visibilityPA.py b/exoctk/contam_visibility/visibilityPA.py index 87afc8f3..eb3c1daf 100755 --- a/exoctk/contam_visibility/visibilityPA.py +++ b/exoctk/contam_visibility/visibilityPA.py @@ -21,7 +21,8 @@ import numpy as np from . 
import ephemeris_old2x as EPH -from jwst_gtvt.find_tgt_info import get_table +# from jwst_gtvt.find_tgt_info import get_table +from ..utils import blockPrint, enablePrint D2R = math.pi / 180. # degrees to radians @@ -134,7 +135,7 @@ def checkVisPA(ra, dec, targetName=None, ephFileName=None, fig=None): if fig is None or fig: tools = 'crosshair, reset, hover, save' radec = ', '.join([str(ra), str(dec)]) - fig = figure(tools=tools, plot_width=800, plot_height=400, + fig = figure(tools=tools, width=800, height=400, x_axis_type='datetime', title=targetName or radec) @@ -194,8 +195,6 @@ def checkVisPA(ra, dec, targetName=None, ephFileName=None, fig=None): berr_x = np.concatenate([gdMaskednum[i0_bot:i1_bot + 1], gdMaskednum[i0_bot:i1_bot + 1][::-1]]) fig.patch(berr_x, berr_y, color='red', fill_alpha=0.2, line_alpha=0) - from bokeh.plotting import show - show(fig) # Plot formatting fig.xaxis.axis_label = 'Date' @@ -204,29 +203,7 @@ def checkVisPA(ra, dec, targetName=None, ephFileName=None, fig=None): return paGood, paBad, gd, fig -def fill_between(fig, xdata, pamin, pamax, **kwargs): - # addressing NIRSpec issue - - # now creating the patches for the arrays - nanbot = np.where([np.isnan(i) for i in pamin])[0] - nantop = np.where([np.isnan(i) for i in pamax])[0] - yb = np.split(pamin, nanbot) - xs = np.split(xdata, nanbot) - yt = np.split(pamax, nantop) - for x, bot, top in zip(xs, yb, yt): - x = np.append(x, x[::-1]) - y = np.append(bot, top[::-1]) - fig.patch(x, y, **kwargs) - return fig - - -def using_gtvt( - ra, - dec, - instrument, - targetName='noName', - ephFileName=None, - output='bokeh'): +def using_gtvt(ra, dec, instrument, targetName='noName', ephFileName=None, output='bokeh'): """Plot the visibility (at a range of position angles) against time. 
Parameters @@ -259,7 +236,9 @@ def using_gtvt( """ # Getting calculations from GTVT (General Target Visibility Tool) + # blockPrint() tab = get_table(ra, dec) + # enablePrint() gd = tab['Date'] paMin = tab[str(instrument) + ' min'] @@ -306,8 +285,8 @@ def using_gtvt( # Time to plot if output == 'bokeh': fig = figure(tools=TOOLS, - plot_width=800, - plot_height=400, + width=800, + height=400, x_axis_type='datetime', title='{} Visibility with {}'.format(targetName, instrument)) @@ -393,16 +372,9 @@ def using_gtvt( # This addresses a bokeh shading issue that accidentally shades # accessible PAs (e.g: trappist-1b) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - remove_pa = [] - for badpa in badPAs: - for panom in paNomnan: - diff = np.abs(badpa - panom) - if diff < 7: - remove_pa.append(badpa) + badPAs = select_badPAs_ge_paNomnan(badPAs, paNomnan) - for pa in np.unique(remove_pa): - badPAs.remove(pa) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~NOTE~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Grouping the bad PAs into lists within the badPAs list. 
# This will make bad PA shading easier in the contamination Bokeh plot @@ -422,9 +394,40 @@ def using_gtvt( elif ((badPAs[idx - 1] + 1) < badPAs[idx]): grouped_badPAs.append([badPAs[idx]]) - grouped_badPAs = np.asarray(grouped_badPAs) + # grouped_badPAs = np.asarray(grouped_badPAs) else: # Accounting for targets with 100% visibility - grouped_badPAs = np.asarray([]) + + grouped_badPAs = [] #np.asarray([]) return paMin, paMax, gd, fig, table, grouped_badPAs + + +def select_badPAs_ge_paNomnan(badPAs, paNomnan, threshold=7): + """Returns the absolute difference between each badPAs and paNomnan + Should be greater than threshold (default=7) + + Parameters + ---------- + badPAs: list + The list of bad position angles + paNomnan: list + The list of nominal PAs + + Returns + ------- + np.ndarray + The array of PAs + """ + # Reshaping + badPAs_array = np.array(badPAs)[np.newaxis] # (1, len(badPAs)) + paNomnan_array = np.array(paNomnan)[np.newaxis].T # (len(paNomnan), 1) + + # elementwise absolute difference + diff = np.abs(np.subtract(badPAs_array, paNomnan_array)) # (len(paNomnan), len(badPAs)) + + # boolean array above threshold + above_thresh = np.all(diff >= threshold, axis=0) # (len(badPAs),) + + # index and return those that are above threshold + return badPAs_array[0, above_thresh] \ No newline at end of file diff --git a/exoctk/contam_visibility/vpa.py b/exoctk/contam_visibility/vpa.py new file mode 100644 index 00000000..99170fbb --- /dev/null +++ b/exoctk/contam_visibility/vpa.py @@ -0,0 +1,179 @@ +def calc_v3pa(ra, dec, aperture, pa, plot=True): + # Converting from decimal degrees + targetcrd = crd.SkyCoord(ra=ra, dec=dec, unit='deg').to_string('hmsdms') + + # Querying for neighbors with 2MASS IRSA's fp_psc (point-source catalog) + info = Irsa.query_region(targetcrd, catalog='fp_psc', spatial='Cone', radius=2.5 * u.arcmin) + # Use Gaia instead? 
+ + # Coordinates of all stars in FOV, including target + allRA = info['ra'].data.data + allDEC = info['dec'].data.data + + # Initiating a dictionary to hold all relevant star information + stars = {} + stars['RA'], stars['DEC'] = allRA, allDEC + + # Finding the target index which will be used to select the target in our position dictionaries + sindRA = (ra - stars['RA']) * np.cos(dec) + cosdRA = dec - stars['DEC'] + stars['distance'] = np.sqrt(sindRA ** 2 + cosdRA ** 2) + targetIndex = np.argmin(stars['distance']) + + # 2MASS - Teff relations + jhMod = np.array( + [0.545, 0.561, 0.565, 0.583, 0.596, 0.611, 0.629, 0.642, 0.66, 0.679, 0.696, 0.71, 0.717, 0.715, 0.706, 0.688, + 0.663, 0.631, 0.601, 0.568, 0.537, 0.51, 0.482, 0.457, 0.433, 0.411, 0.39, 0.37, 0.314, 0.279]) + hkMod = np.array( + [0.313, 0.299, 0.284, 0.268, 0.257, 0.247, 0.24, 0.236, 0.229, 0.217, 0.203, 0.188, 0.173, 0.159, 0.148, 0.138, + 0.13, 0.123, 0.116, 0.112, 0.107, 0.102, 0.098, 0.094, 0.09, 0.086, 0.083, 0.079, 0.07, 0.067]) + teffMod = np.array( + [2800, 2900, 3000, 3100, 3200, 3300, 3400, 3500, 3600, 3700, 3800, 3900, 4000, 4100, 4200, 4300, 4400, 4500, + 4600, 4700, 4800, 4900, 5000, 5100, 5200, 5300, 5400, 5500, 5800, 6000]) + + # JHK bands of all stars in FOV, including target + Jmag = info['j_m'].data.data + Hmag = info['h_m'].data.data + Kmag = info['k_m'].data.data + + # J-H band, H-K band. 
This will be used to derive the stellar Temps later + J_Hobs = Jmag - Hmag + H_Kobs = Hmag - Kmag + + # Number of stars + nStars = stars['RA'].size + + # Find/assign Teff of each star + starsT = np.empty(nStars) + for j in range(nStars): + color_separation = (J_Hobs[j] - jhMod) ** 2 + (H_Kobs[j] - hkMod) ** 2 + min_separation_ind = np.argmin(color_separation) + starsT[j] = teffMod[min_separation_ind] + + # Record keeping + stars['Temp'] = starsT + inst, full_aper, xleft, xright, ybot, ytop = APERTURES[aperture] + + # Instantiate SIAF object + siaf = pysiaf.Siaf(inst) + + aper = siaf.apertures[aperture] + full = siaf.apertures[full_aper] + + # DET_targ -> TEL_targ -> get attitude matrix for target -> TEL_neighbor -> DET_neighbor -> SCI_neighbor + xSweet, ySweet = aper.reference_point('det') + v2targ, v3targ = aper.det_to_tel(xSweet, ySweet) + attitude = pysiaf.utils.rotations.attitude_matrix(v2targ, v3targ, ra, dec, pa) + + xdet, ydet = [], [] + xsci, ysci = [], [] + for starRA, starDEC in zip(stars['RA'], stars['DEC']): + # Get the TEL coordinates of each star using the attitude matrix of the target + V2, V3 = pysiaf.utils.rotations.sky_to_tel(attitude, starRA, starDEC) + # Convert to arcsec and turn to a float + V2, V3 = V2.to(u.arcsec).value, V3.to(u.arcsec).value + + XDET, YDET = aper.tel_to_det(V2, V3) + XSCI, YSCI = aper.det_to_sci(XDET, YDET) + + xdet.append(XDET) + ydet.append(YDET) + xsci.append(XSCI) + ysci.append(YSCI) + + stars['xdet'], stars['ydet'] = np.array(xdet), np.array(ydet) + stars['xsci'], stars['ysci'] = np.array(xsci), np.array(ysci) + + # Full Frame dimensions + rows, cols = full.corners('det') + minrow, maxrow = rows.min(), rows.max() + mincol, maxcol = cols.min(), cols.max() + + # Just stars in FOV (Should always have at least 1, the target) + inFOV = [] + for star in range(nStars): + + x, y = stars['xdet'][star], stars['ydet'][star] + # print(x, y, mincol, maxcol, minrow, maxrow) + if (mincol < x) & (x < maxcol) & (minrow < y) & (y < 
maxrow): + inFOV.append(star) + + # Only keep stars in FOV + FOVstars = {k: v[inFOV] for k, v in stars.items()} + # FOVstars = stars + + # Figure + if plot: + cds = ColumnDataSource(data=FOVstars) + mapper = linear_cmap(field_name='Temp', palette=Spectral6, low=min(starsT), high=max(starsT)) + fig = figure(title='Generated FOV from 2MASS IRSA fp_psc - RA: {}, DEC: {}'.format(ra, dec), match_aspect=True) + fullstr = str(full.AperName.replace('_', ' ')) + fig.title = 'The FOV in SCIENCE coordinates at APA {} for {}'.format(pa, aperture) + # print('aper corners', aper.corners('sci')) + # print('full corners', full.corners('sci')) + fig.patch(full.corners('sci')[0], full.corners('sci')[1], color="black", alpha=0.1) + fig.patch(aper.corners('sci')[0], aper.corners('sci')[1], line_color="blue", fill_color='blue', fill_alpha=0.1) + + # Fine-tune trace dims + trace = get_trace(aperture) + # print(trace.shape, xleft, xright, ybot, ytop) + trace = trace[xleft:-xright, ybot:-ytop] + + # Get subarray dims + subX, subY = aper.XSciSize, aper.YSciSize + + # Make frame + frame = np.zeros((subY, subX)) + + # Plotting the trace footprints + for n, (x, y) in enumerate(zip(FOVstars['xsci'], FOVstars['ysci'])): + + if 'NIS' in aperture: + ptrace = trace.T + height, width = ptrace.shape + x0 = x - width + 68 + y0 = y - height + + elif 'F322W2' in aperture: + ptrace = trace + height, width = ptrace.shape + x0 = x - width + 467 # 2048 - 1581 + y0 = y - height / 2 + + elif 'F356W' in aperture: + ptrace = trace + height, width = ptrace.shape + x0 = x - width + 467 # 2048 - 1581 + y0 = y - height / 2 + + elif 'F277W' in aperture: + ptrace = trace + height, width = ptrace.shape + x0 = x - width - 600 + y0 = y - height / 2 + + elif 'F444W' in aperture: + ptrace = trace.T[:, ::-1] + height, width = ptrace.shape + x0 = x - width + 1096 # 2048 - 952 + y0 = y - height / 2 + + else: + ptrace = trace + height, width = ptrace.shape + x0 = x - width / 2 + y0 = y - height + 113 # 529 - 416 + + if plot: 
+ fig.image(image=[ptrace], x=x0, dw=width, y=y0, dh=height, alpha=0.5) + + # Stars + if plot: + fig.star('xsci', 'ysci', color=mapper, source=cds, size=12) + fig.circle(stars['xsci'][targetIndex], stars['ysci'][targetIndex], size=15, fill_color=None, line_color='red') + color_bar = ColorBar(color_mapper=mapper['transform'], width=8, location=(0, 0), title="Teff") + fig.add_layout(color_bar, 'right') + + show(fig) + + return frame \ No newline at end of file diff --git a/exoctk/data/contam_visibility/predicted_gaia_colour.txt b/exoctk/data/contam_visibility/predicted_gaia_colour.txt new file mode 100644 index 00000000..592d1e45 --- /dev/null +++ b/exoctk/data/contam_visibility/predicted_gaia_colour.txt @@ -0,0 +1,79 @@ + 2300.0 4.490760 + 2400.0 4.590830 + 2500.0 4.387610 + 2600.0 4.138520 + 2700.0 3.817790 + 2800.0 3.508190 + 2900.0 3.240710 + 3000.0 3.009190 + 3100.0 2.813890 + 3200.0 2.645050 + 3300.0 2.494330 + 3400.0 2.355600 + 3500.0 2.222030 + 3600.0 2.095770 + 3700.0 1.980830 + 3800.0 1.879950 + 3900.0 1.787770 + 4000.0 1.705480 + 4100.0 1.628010 + 4200.0 1.552780 + 4300.0 1.482780 + 4400.0 1.415280 + 4500.0 1.355920 + 4600.0 1.301030 + 4700.0 1.249230 + 4800.0 1.202510 + 4900.0 1.156250 + 5000.0 1.113670 + 5100.0 1.072810 + 5200.0 1.033830 + 5300.0 0.996650 + 5400.0 0.962530 + 5500.0 0.929240 + 5600.0 0.895500 + 5700.0 0.862350 + 5800.0 0.834840 + 5900.0 0.809540 + 6000.0 0.775450 + 6100.0 0.746800 + 6200.0 0.717900 + 6300.0 0.692530 + 6400.0 0.662820 + 6500.0 0.638500 + 6600.0 0.611290 + 6700.0 0.585860 + 6800.0 0.560850 + 6900.0 0.536710 + 7000.0 0.512700 + 7200.0 0.465250 + 7400.0 0.428650 + 7600.0 0.371470 + 7800.0 0.326780 + 8000.0 0.282700 + 8200.0 0.241240 + 8400.0 0.200860 + 8600.0 0.164260 + 8800.0 0.133930 + 9000.0 0.108790 + 9200.0 0.083830 + 9400.0 0.061250 + 9600.0 0.039230 + 9800.0 0.022370 +10000.0 0.005390 +10200.0 -0.010070 +10400.0 -0.024240 +10600.0 -0.037250 +10800.0 -0.049180 +11000.0 -0.060120 +11200.0 -0.070140 +11400.0 -0.079370 
+11600.0 -0.087850 +11800.0 -0.095710 +12000.0 -0.103030 +12500.0 -0.119640 +13000.0 -0.134480 +13500.0 -0.148320 +14000.0 -0.158960 +14500.0 -0.174410 +15000.0 -0.186930 diff --git a/exoctk/exoctk_app/app_exoctk.py b/exoctk/exoctk_app/app_exoctk.py index 8fa1d905..2618b0f0 100644 --- a/exoctk/exoctk_app/app_exoctk.py +++ b/exoctk/exoctk_app/app_exoctk.py @@ -16,7 +16,8 @@ import numpy as np from exoctk import log_exoctk -from exoctk.contam_visibility import visibilityPA as vpa +from exoctk.contam_visibility.new_vis_plot import build_visibility_plot, get_exoplanet_positions +#from exoctk.contam_visibility import visibilityPA as vpa from exoctk.contam_visibility import field_simulator as fs from exoctk.contam_visibility import contamination_figure as cf from exoctk.contam_visibility.miniTools import contamVerify @@ -105,134 +106,6 @@ def check_auth(username, password): return username == 'admin' and password == 'secret' -@app_exoctk.route('/contam_visibility', methods=['GET', 'POST']) -def contam_visibility(): - """The contamination and visibility form page - - Returns - ------- - ``flask.render_template`` obj - The rendered template for the contamination and visibility page. 
- """ - - # Load default form - form = fv.ContamVisForm() - form.calculate_contam_submit.disabled = False - - if request.method == 'GET': - - # http://0.0.0.0:5000/contam_visibility?ra=24.354208334287005&dec=-45.677930555343636&target=WASP-18%20b - target_name = request.args.get('target') - form.targname.data = target_name - - ra = request.args.get('ra') - form.ra.data = ra - - dec = request.args.get('dec') - form.dec.data = dec - - return render_template('contam_visibility.html', form=form) - - # Reload page with stellar data from ExoMAST - if form.resolve_submit.data: - - if form.targname.data.strip() != '': - - # Resolve the target in exoMAST - try: - form.targname.data = get_canonical_name(form.targname.data) - data, url = get_target_data(form.targname.data) - - # Update the coordinates - ra_deg = data.get('RA') - dec_deg = data.get('DEC') - - # Set the form values - form.ra.data = ra_deg - form.dec.data = dec_deg - form.target_url.data = url - - except Exception: - form.target_url.data = '' - form.targname.errors = ["Sorry, could not resolve '{}' in exoMAST.".format(form.targname.data)] - - # Send it back to the main page - return render_template('contam_visibility.html', form=form) - - # Reload page with appropriate mode data - if form.mode_submit.data: - - # Update the button - if ('NIRCam' in form.inst.data) or (form.inst.data == 'MIRI') or (form.inst.data == 'NIRSpec'): - form.calculate_contam_submit.disabled = True - else: - form.calculate_contam_submit.disabled = False - - # Send it back to the main page - return render_template('contam_visibility.html', form=form) - - if form.validate_on_submit() and (form.calculate_submit.data or form.calculate_contam_submit.data): - - try: - - # Log the form inputs - log_exoctk.log_form_input(request.form, 'contam_visibility', DB) - - # Make plot - title = form.targname.data or ', '.join([str(form.ra.data), str(form.dec.data)]) - pG, pB, dates, vis_plot, table, badPAs = vpa.using_gtvt(str(form.ra.data), - 
str(form.dec.data), - form.inst.data.split(' ')[0], - targetName=str(title)) - - # Make output table - fh = io.StringIO() - table.write(fh, format='csv', delimiter=',') - visib_table = fh.getvalue() - - # Get scripts - vis_js = INLINE.render_js() - vis_css = INLINE.render_css() - vis_script, vis_div = components(vis_plot) - - # Contamination plot too - if form.calculate_contam_submit.data: - - # First convert ra and dec to HH:MM:SS - ra_deg, dec_deg = float(form.ra.data), float(form.dec.data) - sc = SkyCoord(ra_deg, dec_deg, unit='deg') - ra_dec = sc.to_string('hmsdms') - ra_hms, dec_dms = ra_dec.split(' ')[0], ra_dec.split(' ')[1] - - # Make field simulation - contam_cube = fs.fieldSim(ra_hms, dec_dms, form.inst.data, binComp=form.companion.data) - contam_plot = cf.contam(contam_cube, form.inst.data, targetName=str(title), paRange=[int(form.pa_min.data), int(form.pa_max.data)], badPAs=badPAs, fig='bokeh') - - # Get scripts - contam_js = INLINE.render_js() - contam_css = INLINE.render_css() - contam_script, contam_div = components(contam_plot) - - else: - - contam_script = contam_div = contam_js = contam_css = '' - - return render_template('contam_visibility_results.html', - form=form, vis_plot=vis_div, - vis_table=visib_table, - vis_script=vis_script, vis_js=vis_js, - vis_css=vis_css, contam_plot=contam_div, - contam_script=contam_script, - contam_js=contam_js, - contam_css=contam_css) - - except Exception as e: - err = 'The following error occurred: ' + str(e) - return render_template('groups_integrations_error.html', err=err) - - return render_template('contam_visibility.html', form=form) - - @app_exoctk.route('/download', methods=['POST']) def exoctk_savefile(): """Save results to file @@ -514,6 +387,194 @@ def groups_integrations(): return render_template('groups_integrations.html', form=form, sat_data=sat_data) +@app_exoctk.route('/pa_contam', methods=['GET', 'POST']) +def pa_contam(): + """The contamination and visibility form page + + Returns + ------- + 
``flask.render_template`` obj + The rendered template for the contamination and visibility page. + + """ + return render_template('pa_contam.html') + + +@app_exoctk.route('/contam_visibility', methods=['GET', 'POST']) +def contam_visibility(): + """The contamination and visibility form page + + Returns + ------- + ``flask.render_template`` obj + The rendered template for the contamination and visibility page. + + """ + # Load default form + form = fv.ContamVisForm() + form.calculate_contam_submit.disabled = False + + if request.method == 'GET': + + # http://0.0.0.0:5000/contam_visibility?ra=24.354208334287005&dec=-45.677930555343636&target=WASP-18%20b + target_name = request.args.get('target') + form.targname.data = target_name + + ra = request.args.get('ra') + form.ra.data = ra + + dec = request.args.get('dec') + form.dec.data = dec + + return render_template('contam_visibility.html', form=form) + + # Reload page with stellar data from ExoMAST + if form.resolve_submit.data: + + if form.targname.data.strip() != '': + + # Resolve the target in exoMAST + try: + form.targname.data = get_canonical_name(form.targname.data) + data, url = get_target_data(form.targname.data) + + # Update the coordinates + ra_deg = data.get('RA') + dec_deg = data.get('DEC') + + # Set the form values + form.ra.data = ra_deg + form.dec.data = dec_deg + form.target_url.data = url + + except Exception: + form.target_url.data = '' + form.targname.errors = ["Sorry, could not resolve '{}' in exoMAST.".format(form.targname.data)] + + # Send it back to the main page + return render_template('contam_visibility.html', form=form) + + # Reload page with appropriate mode data + if form.mode_submit.data: + + # Update the button + if form.inst.data == 'NIRSpec': + form.calculate_contam_submit.disabled = True + else: + form.calculate_contam_submit.disabled = False + + # Send it back to the main page + return render_template('contam_visibility.html', form=form) + + if form.validate_on_submit() and 
(form.calculate_submit.data or form.calculate_contam_submit.data): + + instrument = fs.APERTURES[form.inst.data]['inst'] + + try: + + # Log the form inputs + log_exoctk.log_form_input(request.form, 'contam_visibility', DB) + + # Make plot + title = form.targname.data or ', '.join([str(form.ra.data), str(form.dec.data)]) + vis_plot = build_visibility_plot(str(title), instrument, str(form.ra.data), str(form.dec.data)) + table = get_exoplanet_positions(str(form.ra.data), str(form.dec.data)) + + # Make output table + vis_table = table.to_csv() + + # Get scripts + vis_js = INLINE.render_js() + vis_css = INLINE.render_css() + vis_script, vis_div = components(vis_plot) + + # Contamination plot too + if form.calculate_contam_submit.data: + + # Get RA and Dec in degrees + ra_deg, dec_deg = float(form.ra.data), float(form.dec.data) + + # Add companion + try: + comp_teff = float(form.teff.data) + except TypeError: + comp_teff = None + try: + comp_mag = float(form.delta_mag.data) + except TypeError: + comp_mag = None + try: + comp_dist = float(form.dist.data) + except TypeError: + comp_dist = None + try: + comp_pa = float(form.pa.data) + except TypeError: + comp_pa = None + + # Get PA value + pa_val = float(form.v3pa.data) + if pa_val == -1: + + # Add a companion + companion = None + if comp_teff is not None and comp_mag is not None and comp_dist is not None and comp_pa is not None: + companion = {'name': 'Companion', 'ra': ra_deg, 'dec': dec_deg, 'teff': comp_teff, 'delta_mag': comp_mag, 'dist': comp_dist, 'pa': comp_pa} + + # Make field simulation + targframe, starcube, results = fs.field_simulation(ra_deg, dec_deg, form.inst.data, binComp=companion, plot=False, multi=False) + + # Make the plot + # contam_plot = fs.contam_slider_plot(results) + + # Get bad PA list from missing angles between 0 and 360 + badPAs = [j for j in np.arange(0, 360) if j not in [i['pa'] for i in results]] + + # Make old contam plot + starCube = np.zeros((362, 2048, 256)) + starCube[0, :, :] = 
(targframe[0]).T[::-1, ::-1] + starCube[1, :, :] = (targframe[1]).T[::-1, ::-1] + starCube[2:, :, :] = starcube.swapaxes(1, 2)[:, ::-1, ::-1] + contam_plot = cf.contam(starCube, 'NIS_SUBSTRIP256', targetName=form.targname.data, badPAs=badPAs) + + else: + + # Get stars + stars = fs.find_stars(ra_deg, dec_deg, verbose=False) + + # Add companion + print(comp_teff, comp_mag, comp_dist, comp_pa) + if comp_teff is not None and comp_mag is not None and comp_dist is not None and comp_pa is not None: + stars = fs.add_star(stars, 'Companion', ra_deg, dec_deg, comp_teff, delta_mag=comp_mag, dist=comp_dist, pa=comp_pa) + + # Calculate contam + result, contam_plot = fs.calc_v3pa(pa_val, stars, 'NIS_SUBSTRIP256', plot=True, verbose=False) + + # Get scripts + contam_js = INLINE.render_js() + contam_css = INLINE.render_css() + contam_script, contam_div = components(contam_plot) + + else: + + contam_script = contam_div = contam_js = contam_css = pa_val = '' + + return render_template('contam_visibility_results.html', + form=form, vis_plot=vis_div, + vis_table=vis_table, + vis_script=vis_script, vis_js=vis_js, + vis_css=vis_css, contam_plot=contam_div, + contam_script=contam_script, + contam_js=contam_js, + contam_css=contam_css, pa_val=pa_val) + + except Exception as e: + err = 'The following error occurred: ' + str(e) + return render_template('groups_integrations_error.html', err=err) + + return render_template('contam_visibility.html', form=form) + + # Redirect to the index @app_exoctk.route('/') @app_exoctk.route('/index') @@ -665,8 +726,8 @@ def empty_fields(form): # Make filter object and plot bandpass = Throughput(form.bandpass.data, **kwargs) bk_plot = bandpass.plot(draw=False) - bk_plot.plot_width = 580 - bk_plot.plot_height = 280 + bk_plot.width = 580 + bk_plot.height = 280 js_resources = INLINE.render_js() css_resources = INLINE.render_css() filt_script, filt_plot = components(bk_plot) @@ -949,14 +1010,16 @@ def save_visib_result(): flask.Response object with the results 
of the visibility only calculation. """ - - visib_table = flask.request.form['data_file'] - targname = flask.request.form['targetname'] + visib_table = request.form['vis_table'] + targname = request.form['targetname'] targname = targname.replace(' ', '_') # no spaces - instname = flask.request.form['instrumentname'] + instname = request.form['instrumentname'] + + resp = make_response(visib_table) + resp.headers["Content-Disposition"] = "attachment; filename={}_{}_visibility.csv".format(targname, instname) + resp.headers["Content-Type"] = "text/csv" - return flask.Response(visib_table, mimetype="text/dat", - headers={"Content-disposition": "attachment; filename={}_{}_visibility.csv".format(targname, instname)}) + return resp @app_exoctk.route('/admin') diff --git a/exoctk/exoctk_app/form_validation.py b/exoctk/exoctk_app/form_validation.py index 563ad65d..e7837c4c 100644 --- a/exoctk/exoctk_app/form_validation.py +++ b/exoctk/exoctk_app/form_validation.py @@ -4,7 +4,7 @@ import numpy as np from svo_filters import svo from wtforms import StringField, SubmitField, DecimalField, RadioField, SelectField, SelectMultipleField, IntegerField, FloatField -from wtforms.validators import InputRequired, NumberRange, Optional +from wtforms.validators import InputRequired, Length, NumberRange, AnyOf, ValidationError, Optional from wtforms.widgets import ListWidget, CheckboxInput from exoctk.modelgrid import ModelGrid @@ -38,10 +38,12 @@ class ContamVisForm(BaseForm): # Form inputs ra = DecimalField('ra', validators=[NumberRange(min=0, max=360, message='RA must be between 0 and 360 degrees')]) dec = DecimalField('dec', validators=[NumberRange(min=-90, max=90, message='Declinaton must be between -90 and 90 degrees')]) - inst = SelectField('inst', choices=[('NIRISS', 'NIRISS - SOSS'), ('NIRCam F322W2', 'NIRCam - Grism Time Series (F322W2)'), ('NIRCam F444W', 'NIRCam - Grism Time Series (F444W)'), ('MIRI', 'MIRI - LRS'), ('NIRSpec', 'NIRSpec (Visibility Only)')]) - companion = 
StringField('companion', default='') - pa_min = DecimalField('pa_min', default=0, validators=[NumberRange(min=0, max=360, message='Minimum PA must be between 0 and 360 degrees')]) - pa_max = DecimalField('pa_max', default=360, validators=[NumberRange(min=0, max=360, message='Maximum PA must be between 0 and 360 degrees')]) + v3pa = DecimalField('v3pa', default=-1, validators=[NumberRange(min=-1, max=360, message='PA must be between 0 and 360 degrees')]) + inst = SelectField('inst', choices=[('NIS_SUBSTRIP256', 'NIRISS - SOSS - SUBSTRIP256'), ('NIS_SUBSTRIP96', 'NIRISS - SOSS - SUBSTRIP96'), ('NRCA5_GRISM256_F322W2', 'NIRCam - Grism Time Series - F322W2 (Visibility Only)'), ('NRCA5_GRISM256_F444W', 'NIRCam - Grism Time Series - F444W (Visibility Only)'), ('MIRI_SLITLESSPRISM', 'MIRI - LRS (Visibility Only)'), ('NIRSpec', 'NIRSpec (Visibility Only)')]) + delta_mag = DecimalField('delta_mag', default=None, validators=[Optional()]) + dist = DecimalField('dist', default=None, validators=[Optional()]) + pa = DecimalField('pa', default=None, validators=[Optional(), NumberRange(min=0, max=360, message='PA must be between 0 and 360 degrees')]) + teff = DecimalField('teff', validators=[Optional(), NumberRange(min=500, max=6000, message="Effective Temperature must be between 500 and 6000K")]) class FortneyModelForm(BaseForm): diff --git a/exoctk/exoctk_app/templates/base.html b/exoctk/exoctk_app/templates/base.html index c97fa0da..49aa0263 100644 --- a/exoctk/exoctk_app/templates/base.html +++ b/exoctk/exoctk_app/templates/base.html @@ -146,7 +146,7 @@
Running
- exoctk v1.2.4{{version|safe}}
+ exoctk v1.2.5{{version|safe}}
Admin Area diff --git a/exoctk/exoctk_app/templates/contam_visibility.html b/exoctk/exoctk_app/templates/contam_visibility.html index 262ed184..5120f0fc 100644 --- a/exoctk/exoctk_app/templates/contam_visibility.html +++ b/exoctk/exoctk_app/templates/contam_visibility.html @@ -34,7 +34,7 @@
Potential caveats:
{{ error }}
+ {% endfor %} + +