Skip to content

Commit

Permalink
Setup pre-commit (#124)
Browse files Browse the repository at this point in the history
* Setup pre-commit hooks for black, flake8, isort
Loading branch information
thodson-usgs authored Nov 25, 2023
1 parent 2747f0d commit 741c727
Show file tree
Hide file tree
Showing 12 changed files with 633 additions and 415 deletions.
3 changes: 3 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[flake8]
max-line-length = 88
extend-ignore = E203, E704
49 changes: 49 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
ci:
autoupdate_schedule: monthly
autofix_prs: false

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-docstring-first
- id: check-json
- id: check-yaml
- id: double-quote-string-fixer
- id: debug-statements
- id: mixed-line-ending

- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args:
- '--py38-plus'

- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: black
- id: black-jupyter

- repo: https://github.com/keewis/blackdoc
rev: v0.3.8
hooks:
- id: blackdoc

- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8

- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort

- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.0-alpha.6
hooks:
- id: prettier
2 changes: 2 additions & 0 deletions .prettierrc.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
semi = false
singleQuote = true
6 changes: 3 additions & 3 deletions dataretrieval/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from importlib.metadata import version
from importlib.metadata import PackageNotFoundError
from importlib.metadata import PackageNotFoundError, version

from dataretrieval.nadp import *
from dataretrieval.nwis import *
from dataretrieval.streamstats import *
Expand All @@ -10,4 +10,4 @@
try:
__version__ = version('dataretrieval')
except PackageNotFoundError:
__version__ = "version-unknown"
__version__ = 'version-unknown'
102 changes: 51 additions & 51 deletions dataretrieval/codes/states.py
Original file line number Diff line number Diff line change
@@ -1,54 +1,54 @@
"""List of 2-digit state codes with commented full names."""
state_codes = [
'al', # Alabama
'ak', # Alaska
'az', # Arizona
'ar', # Arkansas
'ca', # California
'co', # Colorado
'ct', # Connecticut
'de', # Delaware
'dc', # District of Columbia
'fl', # Florida
'ga', # Georgia
'hi', # Hawaii
'id', # Idaho
'il', # Illinois
'in', # Indiana
'ia', # Iowa
'ks', # Kansas
'ky', # Kentucky
'la', # Louisiana
'me', # Maine
'md', # Maryland
'ma', # Massachusetts
'mi', # Michigan
'mn', # Minnesota
'ms', # Mississippi
'mo', # Missouri
'mt', # Montana
'ne', # Nebraska
'nv', # Nevada
'nh', # New Hampshire
'nj', # New Jersey
'nm', # New Mexico
'ny', # New York
'nc', # North Carolina
'nd', # North Dakota
'oh', # Ohio
'ok', # Oklahoma
'or', # Oregon
'pa', # Pennsylvania
'ri', # Rhode Island
'sc', # South Carolina
'sd', # South Dakota
'tn', # Tennessee
'tx', # Texas
'ut', # Utah
'vt', # Vermont
'va', # Virginia
'wa', # Washington
'wv', # West Virginia
'wi', # Wisconsin
'wy', # Wyoming
'al', # Alabama
'ak', # Alaska
'az', # Arizona
'ar', # Arkansas
'ca', # California
'co', # Colorado
'ct', # Connecticut
'de', # Delaware
'dc', # District of Columbia
'fl', # Florida
'ga', # Georgia
'hi', # Hawaii
'id', # Idaho
'il', # Illinois
'in', # Indiana
'ia', # Iowa
'ks', # Kansas
'ky', # Kentucky
'la', # Louisiana
'me', # Maine
'md', # Maryland
'ma', # Massachusetts
'mi', # Michigan
'mn', # Minnesota
'ms', # Mississippi
'mo', # Missouri
'mt', # Montana
'ne', # Nebraska
'nv', # Nevada
'nh', # New Hampshire
'nj', # New Jersey
'nm', # New Mexico
'ny', # New York
'nc', # North Carolina
'nd', # North Dakota
'oh', # Ohio
'ok', # Oklahoma
'or', # Oregon
'pa', # Pennsylvania
'ri', # Rhode Island
'sc', # South Carolina
'sd', # South Dakota
'tn', # Tennessee
'tx', # Texas
'ut', # Utah
'vt', # Vermont
'va', # Virginia
'wa', # Washington
'wv', # West Virginia
'wi', # Wisconsin
'wy', # Wyoming
]
57 changes: 35 additions & 22 deletions dataretrieval/nadp.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Tools for retrieving data from the National Atmospheric Deposition Program (NADP) including
the National Trends Network (NTN), the Mercury Deposition Network (MDN).
Tools for retrieving data from the National Atmospheric Deposition Program
(NADP) including the National Trends Network (NTN), the Mercury Deposition
Network (MDN).
National Trends Network
-----------------------
Expand Down Expand Up @@ -28,32 +29,43 @@
"""

import requests
import zipfile
import io
import os
import re
import zipfile
from os.path import basename

import requests

NADP_URL = 'https://nadp.slh.wisc.edu'
NADP_MAP_EXT = 'filelib/maps'

NTN_CONC_PARAMS = ['pH', 'So4', 'NO3', 'NH4', 'Ca',
'Mg', 'K', 'Na', 'Cl', 'Br']
NTN_DEP_PARAMS = ['H', 'So4', 'NO3', 'NH4', 'Ca', 'Mg',
'K', 'Na', 'Cl', 'Br', 'N', 'SPlusN']
NTN_CONC_PARAMS = ['pH', 'So4', 'NO3', 'NH4', 'Ca', 'Mg', 'K', 'Na', 'Cl', 'Br']
NTN_DEP_PARAMS = [
'H',
'So4',
'NO3',
'NH4',
'Ca',
'Mg',
'K',
'Na',
'Cl',
'Br',
'N',
'SPlusN',
]

NTN_MEAS_TYPE = ['conc', 'dep', 'precip'] # concentration or deposition


class NADP_ZipFile(zipfile.ZipFile):
"""Extend zipfile.ZipFile for working on data from NADP
"""
"""Extend zipfile.ZipFile for working on data from NADP"""

def tif_name(self):
"""Get the name of the tif file in the zip file."""
filenames = self.namelist()
r = re.compile(".*tif$")
r = re.compile('.*tif$')
tif_list = list(filter(r.match, filenames))
return tif_list[0]

Expand Down Expand Up @@ -93,23 +105,23 @@ def get_annual_MDN_map(measurement_type, year, path):
>>> # get map of mercury concentration in 2010 and extract it to a path
>>> data_path = dataretrieval.nadp.get_annual_MDN_map(
... measurement_type='conc', year='2010', path='somepath')
... measurement_type='conc', year='2010', path='somepath'
... )
"""
url = '{}/{}/MDN/grids/'.format(NADP_URL, NADP_MAP_EXT)
url = f'{NADP_URL}/{NADP_MAP_EXT}/MDN/grids/'

filename = 'Hg_{}_{}.zip'.format(measurement_type, year)
filename = f'Hg_{measurement_type}_{year}.zip'

z = get_zip(url, filename)

if path:
z.extractall(path)

return '{}{}{}'.format(path, os.sep, basename(filename))
return f'{path}{os.sep}{basename(filename)}'


def get_annual_NTN_map(measurement_type, measurement=None, year=None,
path="."):
def get_annual_NTN_map(measurement_type, measurement=None, year=None, path='.'):
"""Download a NTN map from NDAP.
This function looks for a zip file containing gridded information at:
Expand Down Expand Up @@ -146,22 +158,23 @@ def get_annual_NTN_map(measurement_type, measurement=None, year=None,
>>> # get a map of precipitation in 2015 and extract it to a path
>>> data_path = dataretrieval.nadp.get_annual_NTN_map(
... measurement_type='Precip', year='2015', path='somepath')
... measurement_type='Precip', year='2015', path='somepath'
... )
"""
url = '{}/{}/NTN/grids/{}/'.format(NADP_URL, NADP_MAP_EXT, year)
url = f'{NADP_URL}/{NADP_MAP_EXT}/NTN/grids/{year}/'

filename = '{}_{}.zip'.format(measurement_type, year)
filename = f'{measurement_type}_{year}.zip'

if measurement:
filename = '{}_{}'.format(measurement, filename)
filename = f'{measurement}_{filename}'

z = get_zip(url, filename)

if path:
z.extractall(path)

return '{}{}{}'.format(path, os.sep, basename(filename))
return f'{path}{os.sep}{basename(filename)}'


def get_zip(url, filename):
Expand Down
Loading

0 comments on commit 741c727

Please sign in to comment.