Skip to content

Commit

Permalink
Merge pull request #70 from rahil-makadia/dev
Browse files Browse the repository at this point in the history
Gaia; time; impact coordinate search
  • Loading branch information
rahil-makadia authored Jun 4, 2024
2 parents b44f0c3 + ba357c9 commit 91b120a
Show file tree
Hide file tree
Showing 53 changed files with 1,488 additions and 1,362 deletions.
8 changes: 2 additions & 6 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,18 @@ on:

jobs:
sphinx:
runs-on: macos-14
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.11
- name: Set up Xcode
uses: maxim-lobanov/setup-xcode@v1.6.0
with:
xcode-version: '15.1.0'
- name: Install GRSS (including dependencies)
run: |
python3 -m pip install --upgrade pip
brew install pandoc doxygen
sudo apt-get install pandoc doxygen
source initialize.sh
pip3 install .
- name: Build docs
Expand Down
16 changes: 8 additions & 8 deletions .github/workflows/pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-14]
os: [ubuntu-latest] #, macos-latest]
steps:
- name: Checkout repo
uses: actions/checkout@v4
Expand All @@ -26,15 +26,15 @@ jobs:
with:
python-version: "3.11"
- name: Set up Xcode
if: matrix.os == 'macos-14'
if: matrix.os == 'macos-latest'
uses: maxim-lobanov/setup-xcode@v1.6.0
with:
xcode-version: '15.1.0'
- name: Install doxygen
if: matrix.os == 'ubuntu-latest'
run: sudo apt-get install doxygen
- name: Install doxygen
if: matrix.os == 'macos-14'
if: matrix.os == 'macos-latest'
run: brew install doxygen
- name: Initialize repository
run: |
Expand All @@ -55,11 +55,11 @@ jobs:
with:
name: dist-ubuntu-latest
path: dist
- name: Download MacOS distribution from previous job
uses: actions/download-artifact@master
with:
name: dist-macos-14
path: dist
# - name: Download MacOS distribution from previous job
# uses: actions/download-artifact@master
# with:
# name: dist-macos-latest
# path: dist
- name: Show files
run: ls -l dist
- name: Publish to Test PyPI
Expand Down
8 changes: 2 additions & 6 deletions .github/workflows/python_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,18 @@ on:

jobs:
prop-and-fit:
runs-on: macos-14
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.11
- name: Set up Xcode
uses: maxim-lobanov/setup-xcode@v1.6.0
with:
xcode-version: '15.1.0'
- name: Install GRSS (including dependencies)
run: |
python3 -m pip install --upgrade pip
brew install doxygen
sudo apt-get install doxygen
source initialize.sh
pip3 install .
- name: Run tests
Expand Down
17 changes: 13 additions & 4 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
# minimum required CMake version
cmake_minimum_required(VERSION 3.18.0)

# if on a Mac, use gnu compilers instead of clang for ease of use with OpenMP
# if on a Mac, use any GNU compiler from Homebrew
if(APPLE)
set(CMAKE_C_COMPILER gcc-13)
set(CMAKE_CXX_COMPILER g++-13)
# find the brew GNU compiler version
execute_process(COMMAND bash -c "ls $HOMEBREW_PREFIX/bin/g++*" OUTPUT_VARIABLE BREW_GXX OUTPUT_STRIP_TRAILING_WHITESPACE)
# last two characters are the version number
string(REGEX REPLACE ".*g\\+\\+-(.*)$" "\\1" BREW_GXX_VERSION ${BREW_GXX})
message(STATUS "Found GNU g++ compiler version: ${BREW_GXX_VERSION}")
# use any GNU g++ compiler in BREW_PREFIX/bin without specifying the version
set(CMAKE_C_COMPILER "$ENV{HOMEBREW_PREFIX}/bin/gcc-${BREW_GXX_VERSION}")
set(CMAKE_CXX_COMPILER "$ENV{HOMEBREW_PREFIX}/bin/g++-${BREW_GXX_VERSION}")
endif()

# set the project name and get version from version.txt
Expand All @@ -16,7 +22,10 @@ project(grss VERSION ${ver})
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED True)
add_compile_options(-std=c++11 -O3 -fPIC -fopenmp -DGRSS_VERSION="${${PROJECT_NAME}_VERSION}") # operational flags
# add_compile_options(-std=c++11 -g3 -fPIC -fopenmp -DGRSS_VERSION="${${PROJECT_NAME}_VERSION}" -Werror -Wall -Wextra -pedantic) # debugging flags
# add_compile_options(-std=c++11 -g2 -fPIC -fopenmp -DGRSS_VERSION="${${PROJECT_NAME}_VERSION}" -Werror -Wall -Wextra -pedantic) # debugging flags

# Set static library output directory
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/build)

# Set header file directory
include_directories(${CMAKE_SOURCE_DIR}/include)
Expand Down
18 changes: 11 additions & 7 deletions docs/source/api_cpp.rst
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ C++ API
cppsummary/force
cppsummary/gr15
cppsummary/interpolate
cppsummary/observe
cppsummary/parallel
cppsummary/pck
cppsummary/simulation
Expand Down Expand Up @@ -37,25 +38,28 @@ C++ API
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/interpolate.html" title="interpolate.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">interpolate.h</span></code></a></p></td>
<td><p>GRSS C++ integrator state interpolation submodule</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/parallel.html" title="parallel.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">parallel.h</span></code></a></p></td>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/observe.html" title="observe.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">observe.h</span></code></a></p></td>
<td><p>GRSS C++ observations submodule</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/parallel.html" title="parallel.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">parallel.h</span></code></a></p></td>
<td><p>GRSS C++ parallel propagation submodule</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/pck.html" title="pck.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">pck.h</span></code></a></p></td>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/pck.html" title="pck.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">pck.h</span></code></a></p></td>
<td><p>GRSS C++ PCK file submodule</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/simulation.html" title="simulation.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">simulation.h</span></code></a></p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/simulation.html" title="simulation.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">simulation.h</span></code></a></p></td>
<td><p>GRSS C++ propagator simulation submodule</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/spk.html" title="spk.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">spk.h</span></code></a></p></td>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/spk.html" title="spk.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">spk.h</span></code></a></p></td>
<td><p>GRSS C++ SPK file submodule</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/stm.html" title="stm.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">stm.h</span></code></a></p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/stm.html" title="stm.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">stm.h</span></code></a></p></td>
<td><p>GRSS C++ state transition matrix submodule</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/timeconvert.html" title="timeconvert.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">timeconvert.h</span></code></a></p></td>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/timeconvert.html" title="timeconvert.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">timeconvert.h</span></code></a></p></td>
<td><p>GRSS C++ time conversion submodule</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="cppsummary/utilities.html" title="utilities.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">utilities.h</span></code></a></p></td>
<tr class="row-odd"><td><p><a class="reference internal" href="cppsummary/utilities.html" title="utilities.h"><code class="xref py py-obj docutils literal notranslate"><span class="pre">utilities.h</span></code></a></p></td>
<td><p>GRSS C++ utilities submodule</p></td>
</tr>
</tbody>
Expand Down
6 changes: 6 additions & 0 deletions docs/source/cppsummary/observe.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
observe.h
=============

.. doxygenfile:: observe.h
:project: GRSS

2 changes: 0 additions & 2 deletions docs/source/examples.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,3 @@ Examples
tests/python/fit/eggl.ipynb
tests/python/fit/farnocchia.ipynb
tests/python/fit/shantanunaidu.ipynb

Note: The OD examples show a statistically significant difference between GRSS and JPL SBDB orbit solutions. This stems from the fact that the two handle astrometry from the Gaia Focused Product Release (FPR) differently. These differences will be reconciled in the future.
2 changes: 2 additions & 0 deletions docs/source/fit.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,12 @@ The optical measurements are acquired using the `Minor Planet Center API <https:

#. Star catalog biases [#]_
#. Measurement weighting [#]_
#. Gaia astrometry handling [#]_

Once the optical astrometry has been processed, the radar astrometry has been acquired, and the initial orbit is provided by the user, the least squares filter can be run. As of now, the filter can fit the nominal state, the nongravitational acceleration parameters, and any impulsive maneuver events. Currently the partial derivatives in the normal matrix are calculated using 1\ :sup:`st`-order central differences by default, but analytical partial derivatives are also a choice offered to the user. The filter also implements an outlier rejection scheme [#]_ to make sure any spurious measurements do not contaminate the fit.

.. rubric:: References
.. [#] Eggl, S., Farnocchia, D., Chamberlin, A.B., and Chesley, S.R., "Star catalog position and proper motion corrections in asteroid astrometry II: The Gaia era", Icarus, Volume 339, Pages 1-17, 2020. https://doi.org/10.1016/j.icarus.2019.113596.
.. [#] Vereš, P., Farnocchia, D., Chesley, S.R., and Chamberlin, A.B., "Statistical analysis of astrometric errors for the most productive asteroid surveys", Icarus, Volume 296, Pages 139-149, 2017. https://doi.org/10.1016/j.icarus.2017.05.021.
.. [#] Fuentes-Muñoz, O., Farnocchia, D., Naidu, S.P., and Park, R.S., "Asteroid Orbit Determination Using Gaia FPR: Statistical Analysis", AJ, Volume 167, Pages 290-299, 2024. https://doi.org/10.3847/1538-3881/ad4291.
.. [#] Carpino, M., Milani, A., and Chesley, S.R., "Error statistics of asteroid optical astrometric observations", Icarus, Volume 166, Pages 248-270, 2003. https://doi.org/10.1016/S0019-1035(03)00051-4.
32 changes: 20 additions & 12 deletions grss/fit/fit_optical.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,17 +71,18 @@ def _ades_ast_cat_check(df):
df : pandas DataFrame
ADES data frame
Returns
-------
df : pandas DataFrame
ADES data frame with invalid astCat values removed
Raises
------
ValueError
If the astCat values are invalid
"""
# from https://www.minorplanetcenter.net/iau/info/ADESFieldValues.html
valid_cats = [
'Gaia_Int', 'PS1_DR1', 'PS1_DR2', 'ATLAS2',
'Gaia3', 'Gaia3E', 'Gaia2', 'Gaia1', 'Gaia_2016',
'NOMAD', 'PPMXL', 'UBSC', 'UCAC5', 'UCAC4', 'URAT1', '2MASS'
]
valid_cats = list(ades_catalog_map.keys())
deprecated_cats = [
'UCAC3', 'UCAC2', 'UCAC1',
'USNOB1', 'USNOA2', 'USNOSA2', 'USNOA1', 'USNOSA1',
Expand All @@ -91,11 +92,18 @@ def _ades_ast_cat_check(df):
'MPOSC3', 'PPM', 'AC', 'SAO1984', 'SAO', 'AGK3', 'FK4',
'ACRS', 'LickGas', 'Ida93', 'Perth70', 'COSMOS',
'Yale', 'ZZCAT', 'IHW', 'GZ', 'UNK']
if df['astCat'].isin(deprecated_cats).any():
df_cats = df['astCat']
if df_cats.isin(deprecated_cats).any():
print("\tWARNING: At least one deprecated star catalog in the data.")
if not df['astCat'].isin(valid_cats+deprecated_cats).all():
raise ValueError(f"At least one invalid astCat in the data: {df['astCat'].unique()}.\n"
f"Acceptable astCat values are {valid_cats+deprecated_cats}.")
if not df_cats.isin(valid_cats).all():
invalid_cats = np.setdiff1d(df_cats.unique(), valid_cats)
print("\tWARNING: At least one unrecognized astCat in the data. "
f"Unrecognized values are {invalid_cats}. "
"Force deleting corresponding observations and setting catalog to UNK...")
delete_idx = df[df['astCat'].isin(invalid_cats)].index
df.loc[delete_idx, 'selAst'] = 'd'
df.loc[delete_idx, 'astCat'] = 'UNK'
return df

def create_optical_obs_df(body_id, optical_obs_file=None, t_min_tdb=None,
t_max_tdb=None, verbose=False):
Expand Down Expand Up @@ -156,7 +164,7 @@ def create_optical_obs_df(body_id, optical_obs_file=None, t_min_tdb=None,
if verbose:
print(f"Read in {len(obs_df)} observations from the MPC.")
_ades_mode_check(obs_df)
_ades_ast_cat_check(obs_df)
obs_df = _ades_ast_cat_check(obs_df)
# filter the data based on the time range
obs_df.query(f"{t_min_utc} <= obsTimeMJD <= {t_max_utc}", inplace=True)
# reindex the data frame
Expand Down Expand Up @@ -304,8 +312,8 @@ def add_gaia_obs(obs_df, t_min_tdb=None, t_max_tdb=None, gaia_dr='gaiadr3', verb
if t_max_tdb is None:
t_max_tdb = np.inf
# get gaia query results
perm_id = obs_df['permID'][0]
prov_id = obs_df['provID'][0]
perm_id = obs_df.iloc[-1]['permID']
prov_id = obs_df.iloc[-1]['provID']
body_id = perm_id if isinstance(perm_id, str) else prov_id
res = _get_gaia_query_results(body_id, release=gaia_dr)
if verbose:
Expand Down
22 changes: 15 additions & 7 deletions grss/fit/fit_radar.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@ def add_radar_obs(obs_df, t_min_tdb=None, t_max_tdb=None, verbose=False):
ValueError
If the observation type is not recognized
"""
perm_id = obs_df['permID'][0]
prov_id = obs_df['provID'][0]
perm_id = obs_df.iloc[-1]['permID']
prov_id = obs_df.iloc[-1]['provID']
if t_min_tdb is None:
t_min_utc = -np.inf
else:
Expand All @@ -71,6 +71,7 @@ def add_radar_obs(obs_df, t_min_tdb=None, t_max_tdb=None, verbose=False):
print(f"Read in {num_obs} radar observations from JPL radar API.")
data = raw_data['data']
time_range_count = 0
pre1972_count = 0
for i in range(num_obs):
obs = data[num_obs-i-1]
date = Time(obs[1], format='iso', scale='utc')
Expand All @@ -84,19 +85,18 @@ def add_radar_obs(obs_df, t_min_tdb=None, t_max_tdb=None, verbose=False):
freq = float(obs[5])
# transmitter and receiver codes, use radar_observer_map if you
# want to use MPC station info (less accurate in my experience)
tx_code = obs[6]
rx_code = obs[7]
bounce_point = obs[8]
bounce_point_int = 1 if bounce_point == 'C' else 0
rx_code = obs[6]
tx_code = obs[7]
bounce_point_int = 1 if obs[8] == 'C' else 0
idx = len(obs_df)
obs_df.loc[idx,'permID'] = perm_id
obs_df.loc[idx,'provID'] = prov_id
obs_df.loc[idx,'obsTime'] = f'{date.utc.isot}Z'
obs_df.loc[idx,'obsTimeMJD'] = date.utc.mjd
obs_df.loc[idx,'obsTimeMJDTDB'] = date.tdb.mjd
obs_df.loc[idx,'mode'] = 'RAD'
obs_df.loc[idx,'trx'] = tx_code
obs_df.loc[idx,'rcv'] = rx_code
obs_df.loc[idx,'trx'] = tx_code
obs_df.loc[idx,'delay'] = obs_val/1.0e6 if delay else np.nan
obs_df.loc[idx,'rmsDelay'] = obs_sigma if delay else np.nan
obs_df.loc[idx,'doppler'] = obs_val if doppler else np.nan
Expand All @@ -105,6 +105,14 @@ def add_radar_obs(obs_df, t_min_tdb=None, t_max_tdb=None, verbose=False):
obs_df.loc[idx,'frq'] = freq
obs_df.loc[idx,'sigDelay'] = obs_sigma if delay else np.nan
obs_df.loc[idx,'sigDoppler'] = obs_sigma if doppler else np.nan
if date.utc.mjd < 41317:
pre1972_count += 1
obs_df.loc[idx,'selAst'] = 'd'
if pre1972_count > 0:
print(f"\tWARNING: {pre1972_count} radar observations were taken before 1972.\n"
"\t\tThese residuals cannot be reliably computed because high-accuracy Earth "
"orientation kernels are not available for this time period.\n"
"\t\tForce deleting them...")
if verbose:
print(f"\tFiltered to {num_obs-time_range_count} observations that satisfy the "
"time range constraints.")
Expand Down
Loading

0 comments on commit 91b120a

Please sign in to comment.