updated get_traj method in hytraj.py so that parsing the date information does not generate a UserWarning.
Alice Crawford committed Oct 8, 2023
1 parent a1f865f commit cd34a1b
Showing 1 changed file with 24 additions and 6 deletions.
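
For context, the change below replaces pandas' implicit date-column combining (parse_dates with a dict of column indices) with explicit per-row parsing against a fixed format string, so pandas never has to infer the datetime format while reading. A minimal standalone sketch of the same idea, using only pandas and illustrative sample data (none of these names come from monetio):

    import io
    import pandas as pd

    # Five whitespace-separated date/time fields per line (year month day hour minute),
    # read as strings so leading zeros survive and nothing is coerced to int first.
    sample = io.StringIO("23 10 08 12 00\n23 10 08 13 00\n")
    df = pd.read_csv(sample, header=None, sep=r"\s+", dtype=str)

    # Join the pieces and parse with an explicit format; because the format is given,
    # pandas does not fall back to inferring it element by element.
    df["time"] = pd.to_datetime(df.apply(" ".join, axis=1), format="%y %m %d %H %M")
    print(df["time"])
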
30 changes: 24 additions & 6 deletions monetio/models/hytraj.py
@@ -214,6 +214,8 @@ def get_traj(tdump):
     Date and Time are a Datetime Object
     """
+    # 2023 Oct 8. the method of parsing dates was generating a warning.
+    # rewrote so no warning is generated.
     # Going back to first line of file
     tdump.seek(0)
     # Gets the starting locations
@@ -224,21 +226,37 @@ def get_traj(tdump):
     variables = varibs[1:]
     # Read the traj arrays into pandas dataframe
     heads = [
-        "time",
         "traj_num",
         "met_grid",
         "forecast_hour",
         "traj_age",
         "latitude",
         "longitude",
         "altitude",
-    ] + variables
-    traj = pd.read_csv(tdump, header=None, sep=r"\s+", parse_dates={"time": [2, 3, 4, 5, 6]})
+    ] + variables + ['time']
+    def dateparse(row):
+        slist = [row[2],row[3],row[4],row[5],row[6]]
+        tstr = ' '.join(slist)
+        tstr = time_str_fixer(tstr)
+        tdate = pd.to_datetime(tstr,format="%y %m %d %H %M")
+        return tdate
+    dhash = {0:int,1:int,2:str,3:str,4:str,5:str,6:str,7:float,8:float,9:float,10:float,11:float}
+    traj = pd.read_csv(tdump, header=None, sep=r"\s+",dtype=dhash)
+    traj['time'] = traj.apply(lambda row: dateparse(row),axis=1)
+    traj = traj.drop([2,3,4,5,6],axis=1)
     # Adds headers to dataframe
     traj.columns = heads
     # Makes all headers lowercase
+    neworder = [
+        "time",
+        "traj_num",
+        "met_grid",
+        "forecast_hour",
+        "traj_age",
+        "latitude",
+        "longitude",
+        "altitude",
+    ] + variables
+    traj = traj[neworder]
     traj.columns = map(str.lower, traj.columns)
-    # Puts time datetime object
-    traj["time"] = traj.apply(lambda row: time_str_fixer(row["time"]), axis=1)
-    traj["time"] = pd.to_datetime(traj["time"], format="%y %m %d %H %M")
     return traj
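
A minimal usage sketch for the updated function, assuming get_traj is importable from monetio.models.hytraj and can be called directly on an open HYSPLIT tdump file handle (the filename below is hypothetical):

    from monetio.models import hytraj

    # Hypothetical trajectory dump file produced by a HYSPLIT run.
    with open("tdump.20231008") as f:
        traj = hytraj.get_traj(f)

    # The time column is built by dateparse above, so it holds datetime values
    # and no format-inference warning is emitted while reading.
    print(traj[["time", "latitude", "longitude", "altitude"]].head())
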
