Per #2550, restructuring the tc_diag python directory.
1 parent 17c8705, commit 807106e
Showing 90 changed files with 1,055 additions and 164,760 deletions.
@@ -0,0 +1,26 @@
Metadata-Version: 2.1
Name: atcf_tools
Version: 0.12.0
Summary: Library of routines to support TC diagnostic code.
Home-page: https://bear.cira.colostate.edu/rdemaria/atcf_tools
Author: Robert DeMaria
Author-email: robert.demaria@colostate.edu
License: UNKNOWN
Platform: any
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.8
Description-Content-Type: text/markdown

# atcf_tools

Tools for generating and working with Pandas DataFrames made from ATCF files.
@@ -0,0 +1,3 @@
# atcf_tools

Tools for generating and working with Pandas DataFrames made from ATCF files.
26 changes: 26 additions & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools.egg-info/PKG-INFO
@@ -0,0 +1,26 @@
Metadata-Version: 2.1
Name: atcf-tools
Version: 0.12.0
Summary: Library of routines to support TC diagnostic code.
Home-page: https://bear.cira.colostate.edu/rdemaria/atcf_tools
Author: Robert DeMaria
Author-email: robert.demaria@colostate.edu
License: UNKNOWN
Platform: any
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.8
Description-Content-Type: text/markdown

# atcf_tools

Tools for generating and working with Pandas DataFrames made from ATCF files.
17 changes: 17 additions & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools.egg-info/SOURCES.txt
@@ -0,0 +1,17 @@
README.md
pyproject.toml
setup.cfg
setup.py
atcf_tools/__init__.py
atcf_tools/active_tracks.py
atcf_tools/filenames.py
atcf_tools/interpolation.py
atcf_tools/overpass.py
atcf_tools/parsing.py
atcf_tools/track_tools.py
atcf_tools.egg-info/PKG-INFO
atcf_tools.egg-info/SOURCES.txt
atcf_tools.egg-info/dependency_links.txt
atcf_tools.egg-info/not-zip-safe
atcf_tools.egg-info/requires.txt
atcf_tools.egg-info/top_level.txt
1 change: 1 addition & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools.egg-info/dependency_links.txt
@@ -0,0 +1 @@
1 change: 1 addition & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools.egg-info/not-zip-safe
@@ -0,0 +1 @@
4 changes: 4 additions & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools.egg-info/requires.txt
@@ -0,0 +1,4 @@
numpy
toml
pandas
scipy
1 change: 1 addition & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools.egg-info/top_level.txt
@@ -0,0 +1 @@
atcf_tools
@@ -0,0 +1 @@
__version__ = "0.12.0"
42 changes: 42 additions & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools/active_tracks.py
@@ -0,0 +1,42 @@
import glob
import pathlib
from typing import List
import datetime as dt

import pandas as pd
import numpy as np

from atcf_tools import track_tools


def get_active_tracks(glob_string: str,
                      search_time: dt.datetime,
                      search_reverse_hours: int,
                      search_recursive=False) -> List[pd.DataFrame]:
    """Return CARQ tracks from files matching glob_string that have at least
    one time in the window ending at search_time."""
    if search_reverse_hours < 0:
        raise ValueError(
            f"search_reverse_hours must be >= 0, given: {search_reverse_hours}")

    start_time = search_time - dt.timedelta(hours=search_reverse_hours)

    filenames = glob.glob(glob_string, recursive=search_recursive)
    active_tracks = []
    for filename in filenames:
        adeck_track = track_tools.get_carq_track(filename)
        if track_has_times_in_range(adeck_track, start_time, search_time):
            adeck_track["atcf_filename"] = filename
            active_tracks.append(adeck_track)

    return active_tracks


def track_has_times_in_range(track: pd.DataFrame, start_time: dt.datetime,
                             end_time: dt.datetime) -> bool:
    """Return True if the track has any times between start_time and end_time,
    inclusive."""
    if end_time < start_time:
        raise ValueError(
            f"End time must be after start time, given start: {start_time} end: {end_time}"
        )

    valid_indices = (track.index >= start_time) & (track.index <= end_time)
    is_track_active = np.any(valid_indices)
    return is_track_active
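As an illustration only (not part of this commit), a minimal sketch of how get_active_tracks might be called. The glob pattern and times below are hypothetical, and the function relies on track_tools.get_carq_track from elsewhere in the package:

import datetime as dt

from atcf_tools.active_tracks import get_active_tracks

# Hypothetical a-deck location and analysis time; adjust to the local data layout.
tracks = get_active_tracks(glob_string="/data/adeck/aal*.dat",
                           search_time=dt.datetime(2023, 9, 1, 12),
                           search_reverse_hours=12)
for track in tracks:
    # Each returned DataFrame carries its source path in the added "atcf_filename" column.
    print(track["atcf_filename"].iloc[0], len(track))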
@@ -0,0 +1,7 @@
import re

ATCF_ID_REGEX = re.compile(
    r"(?P<basin>[a-zA-Z]{2})(?P<storm_number>\d{2})(?P<year>\d{4})")
ATCF_FILENAME_STEM_REGEX = re.compile(
    r"(?P<track_type>[a-zA-Z])(?P<basin>[a-zA-Z]{2})(?P<storm_number>\d{2})(?P<year>\d{4})"
)
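For illustration, a short sketch of pulling the named groups out of an ATCF storm ID or filename stem (the IDs below are made up):

from atcf_tools.filenames import ATCF_ID_REGEX, ATCF_FILENAME_STEM_REGEX

# ATCF storm ID: 2-letter basin, 2-digit storm number, 4-digit year.
match = ATCF_ID_REGEX.match("al092023")
print(match.group("basin"), match.group("storm_number"), match.group("year"))  # al 09 2023

# Filename stems add a leading track-type letter, e.g. "a" for a-deck files.
stem = ATCF_FILENAME_STEM_REGEX.match("aal092023")
print(stem.group("track_type"), stem.group("basin"))  # a al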
65 changes: 65 additions & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools/interpolation.py
@@ -0,0 +1,65 @@
import datetime as dt
from typing import List, Tuple

import numpy as np
import pandas as pd

from atcf_tools import track_tools


def positions_at_times(
        track: pd.DataFrame,
        times: List[dt.datetime],
        method="pchip",
        union_times: bool = False) -> Tuple[pd.Series, pd.Series]:
    """Interpolate track positions to the given times.

    Longitude is interpolated via its sine and cosine so tracks that cross
    the dateline do not produce spurious jumps.
    """
    lons, lats = track_tools.get_positions(track)
    lons = track_tools.remove_duplicate_times(lons)
    lats = track_tools.remove_duplicate_times(lats)
    interp_lats = lats.copy()

    sin_lons = np.sin(np.radians(lons))
    cos_lons = np.cos(np.radians(lons))

    for time in times:
        if time not in lons.index:
            sin_lons.loc[time] = np.nan
            cos_lons.loc[time] = np.nan
            interp_lats.loc[time] = np.nan

    sin_lons = sin_lons.sort_index()
    cos_lons = cos_lons.sort_index()
    interp_lats = interp_lats.sort_index()

    sin_lons.interpolate(method=method, inplace=True)
    cos_lons.interpolate(method=method, inplace=True)
    interp_lats.interpolate(method=method, inplace=True)

    interp_lons = np.degrees(np.arctan2(sin_lons, cos_lons))

    if not union_times:
        # pylint: disable=no-member
        interp_lons = interp_lons.loc[times]
        interp_lats = interp_lats.loc[times]

    return interp_lons, interp_lats


def field_at_times(track: pd.DataFrame,
                   field_name: str,
                   times: List[dt.datetime],
                   method="linear",
                   union_times: bool = False) -> pd.Series:
    """Interpolate a single track column to the given times."""
    column = track[field_name].copy(deep=True)
    column = track_tools.remove_duplicate_times(column)

    for time in times:
        if time not in column.index:
            column.loc[time] = np.nan

    column.sort_index(inplace=True)
    column.interpolate(method=method, inplace=True)

    if not union_times:
        column = column.loc[times]

    return column
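positions_at_times interpolates longitude through its sine and cosine and recovers the angle with arctan2, so a track crossing the dateline is not interpolated through 0 degrees. Below is a standalone sketch of that idea with made-up times and longitudes (the package routines themselves pull positions from track_tools, which is not shown in this excerpt):

import datetime as dt

import numpy as np
import pandas as pd

times = pd.DatetimeIndex([dt.datetime(2023, 9, 1, h) for h in (0, 6, 12)])
lons = pd.Series([179.0, -179.5, -178.0], index=times)  # crosses the dateline

sin_lons = np.sin(np.radians(lons))
cos_lons = np.cos(np.radians(lons))

# Insert the target time as NaN, sort, then interpolate each component.
target = dt.datetime(2023, 9, 1, 3)
sin_lons.loc[target] = np.nan
cos_lons.loc[target] = np.nan
sin_lons = sin_lons.sort_index().interpolate(method="pchip")
cos_lons = cos_lons.sort_index().interpolate(method="pchip")

interp_lon = np.degrees(np.arctan2(sin_lons.loc[target], cos_lons.loc[target]))
print(interp_lon)  # roughly 179.75: stays near the dateline rather than swinging through 0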
101 changes: 101 additions & 0 deletions
scripts/python/tc_diag/atcf_tools/atcf_tools/overpass.py
@@ -0,0 +1,101 @@
"""Routines to help find overpasses."""
import dataclasses
import datetime as dt
from typing import Tuple

import numpy as np
import pandas as pd

EARTH_RADIUS = 6371  # km


def filter_track_by_time_range(track: pd.DataFrame, start_time: dt.datetime,
                               end_time: dt.datetime) -> pd.DataFrame:
    indices = np.logical_and(track.index >= start_time,
                             track.index <= end_time)
    return track.loc[indices]


@dataclasses.dataclass(frozen=True)
class OverpassPoint:
    time: dt.datetime
    lon: float
    lat: float


@dataclasses.dataclass(frozen=True)
class Overpass:
    distance_km: float
    track_a_point: OverpassPoint
    track_b_point: OverpassPoint


def find_overpass(track_a: pd.DataFrame,
                  track_b: pd.DataFrame,
                  start_time: dt.datetime,
                  end_time: dt.datetime,
                  a_lon_name="lon",
                  b_lon_name="lon",
                  a_lat_name="lat",
                  b_lat_name="lat"):
    """Return the closest approach of the two tracks within the time range,
    or None if either track has no points in the range."""
    filtered_a = filter_track_by_time_range(track_a, start_time, end_time)
    filtered_b = filter_track_by_time_range(track_b, start_time, end_time)

    if len(filtered_a) <= 0 or len(filtered_b) <= 0:
        return None

    min_distance_km, min_a_index, min_b_index = _find_min_distance_indices(
        filtered_a[a_lon_name], filtered_a[a_lat_name], filtered_b[b_lon_name],
        filtered_b[b_lat_name])

    track_a_point = _get_overpass_point_at_index(filtered_a, min_a_index,
                                                 a_lon_name, a_lat_name)
    track_b_point = _get_overpass_point_at_index(filtered_b, min_b_index,
                                                 b_lon_name, b_lat_name)
    overpass = Overpass(min_distance_km, track_a_point, track_b_point)
    return overpass


def _find_min_distance_indices(lons_a: np.ndarray, lats_a: np.ndarray,
                               lons_b: np.ndarray,
                               lats_b: np.ndarray) -> Tuple[float, int, int]:
    min_distance_km = None
    min_a_index = None
    min_b_index = None
    for i, lon, lat in zip(range(len(lons_a)), lons_a, lats_a):
        distances_km = haversine_distance_km(lon, lat, lons_b, lats_b)
        track_b_index = np.argmin(distances_km)
        distance_km = distances_km[track_b_index]

        if min_distance_km is None or distance_km < min_distance_km:
            min_distance_km = distance_km
            min_a_index = i
            min_b_index = track_b_index

    return min_distance_km, min_a_index, min_b_index


def _get_overpass_point_at_index(
        filtered: pd.DataFrame, index: int, lon_name: str,
        lat_name: str) -> OverpassPoint:
    time = filtered.index[index]
    lon = filtered[lon_name].iloc[index]
    lat = filtered[lat_name].iloc[index]
    point = OverpassPoint(time, lon, lat)
    return point


def haversine_distance_km(lons_a: np.ndarray, lats_a: np.ndarray,
                          lons_b: np.ndarray,
                          lats_b: np.ndarray) -> np.ndarray:
    """Great-circle distance in km between points given in degrees."""
    lons_a = lons_a % 360
    lons_b = lons_b % 360

    d_lat = np.radians(lats_b - lats_a)
    d_lon = np.radians(lons_b - lons_a)
    a = np.sin(d_lat / 2.0) * np.sin(d_lat / 2) + \
        np.cos(np.radians(lats_a)) * \
        np.cos(np.radians(lats_b)) * np.sin(d_lon / 2.0) * np.sin(d_lon / 2.0)
    c = 2 * np.arctan2(np.sqrt(a), np.sqrt(1 - a))
    d = EARTH_RADIUS * c
    return d
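As an illustration only (not part of the commit), a minimal sketch of find_overpass on two made-up tracks stored as DataFrames indexed by time with "lon"/"lat" columns:

import datetime as dt

import pandas as pd

from atcf_tools.overpass import find_overpass

# Hypothetical tracks; real tracks come from the ATCF parsing routines.
times_a = pd.DatetimeIndex([dt.datetime(2023, 9, 1, h) for h in (0, 6, 12)])
track_a = pd.DataFrame({"lon": [-60.0, -61.0, -62.0],
                        "lat": [15.0, 15.5, 16.0]}, index=times_a)

times_b = pd.DatetimeIndex([dt.datetime(2023, 9, 1, h) for h in (3, 9)])
track_b = pd.DataFrame({"lon": [-60.4, -61.6],
                        "lat": [15.2, 15.8]}, index=times_b)

overpass = find_overpass(track_a, track_b,
                         start_time=dt.datetime(2023, 9, 1, 0),
                         end_time=dt.datetime(2023, 9, 1, 12))
if overpass is not None:
    print(overpass.distance_km,
          overpass.track_a_point.time, overpass.track_b_point.time)

The returned Overpass holds the closest pair of points and their separation from haversine_distance_km, so distance_km is in kilometers.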