Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
…into develop
  • Loading branch information
mike-gangl committed Aug 23, 2022
2 parents 5694b9d + f8783bd commit 9680163
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 13 deletions.
2 changes: 1 addition & 1 deletion subscriber/podaac_access.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from datetime import datetime

__version__ = "1.11.0"
extensions = [".nc", ".h5", ".zip", ".tar.gz"]
extensions = [".nc", ".h5", ".zip", ".tar.gz", ".tiff"]
edl = "urs.earthdata.nasa.gov"
cmr = "cmr.earthdata.nasa.gov"
token_url = "https://" + cmr + "/legacy-services/rest/tokens"
Expand Down
8 changes: 2 additions & 6 deletions subscriber/podaac_data_downloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,9 @@ def create_parser():
help="Cycle number for determining downloads. can be repeated for multiple cycles",
action='append', type=int)
parser.add_argument("-sd", "--start-date", required=False, dest="startDate",
help="The ISO date time before which data should be retrieved. For Example, --start-date 2021-01-14T00:00:00Z") # noqa E501
help="The ISO date time after which data should be retrieved. For Example, --start-date 2021-01-14T00:00:00Z") # noqa E501
parser.add_argument("-ed", "--end-date", required=False, dest="endDate",
help="The ISO date time after which data should be retrieved. For Example, --end-date 2021-01-14T00:00:00Z") # noqa E501
help="The ISO date time before which data should be retrieved. For Example, --end-date 2021-01-14T00:00:00Z") # noqa E501

# Adding optional arguments
parser.add_argument("-f", "--force", dest="force", action="store_true", help = "Flag to force downloading files that are listed in CMR query, even if the file exists and checksum matches") # noqa E501
Expand Down Expand Up @@ -226,10 +226,6 @@ def run(args=None):

downloads = [item for sublist in downloads_all for item in sublist]

if len(downloads) >= page_size:
logging.warning("Only the most recent " + str(
page_size) + " granules will be downloaded; try adjusting your search criteria (suggestion: reduce time period or spatial region of search) to ensure you retrieve all granules.")

# filter list based on extension
if not extensions:
extensions = pa.extensions
Expand Down
8 changes: 2 additions & 6 deletions subscriber/podaac_data_subscriber.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,10 +66,10 @@ def create_parser():

# spatiotemporal arguments
parser.add_argument("-sd", "--start-date", dest="startDate",
help="The ISO date time before which data should be retrieved. For Example, --start-date 2021-01-14T00:00:00Z",
help="The ISO date time after which data should be retrieved. For Example, --start-date 2021-01-14T00:00:00Z",
default=False) # noqa E501
parser.add_argument("-ed", "--end-date", dest="endDate",
help="The ISO date time after which data should be retrieved. For Example, --end-date 2021-01-14T00:00:00Z",
help="The ISO date time before which data should be retrieved. For Example, --end-date 2021-01-14T00:00:00Z",
default=False) # noqa E501
parser.add_argument("-b", "--bounds", dest="bbox",
help="The bounding rectangle to filter result in. Format is W Longitude,S Latitude,E Longitude,N Latitude without spaces. Due to an issue with parsing arguments, to use this command, please use the -b=\"-180,-90,180,90\" syntax when calling from the command line. Default: \"-180,-90,180,90\".",
Expand Down Expand Up @@ -254,10 +254,6 @@ def run(args=None):

downloads = [item for sublist in downloads_all for item in sublist]

if len(downloads) >= page_size:
logging.warning("Only the most recent " + str(
page_size) + " granules will be downloaded; try adjusting your search criteria (suggestion: reduce time period or spatial region of search) to ensure you retrieve all granules.")

# filter list based on extension
if not extensions:
extensions = pa.extensions
Expand Down

0 comments on commit 9680163

Please sign in to comment.