diff --git a/docs/sphinx/source/introtutorial.rst b/docs/sphinx/source/introtutorial.rst index b3a9b9a7b6..b4600e633d 100644 --- a/docs/sphinx/source/introtutorial.rst +++ b/docs/sphinx/source/introtutorial.rst @@ -52,24 +52,13 @@ the :ref:`iotools` module. In this example we will be using PVGIS, one of the data sources available, to retrieve a Typical Meteorological Year (TMY) which includes irradiation, temperature and wind speed. -.. note:: PVGIS uses different naming conventions, so it is required to rename - the weather data variables before using them. Data is already UTC-localized, - so conversion to local timezone is optional. - .. ipython:: python - variables_translation = { - "Gb(n)": "dni", - "G(h)": "ghi", - "Gd(h)": "dhi", - "T2m": "temp_air", - "WS10m": "wind_speed", - } tmys = [] for location in coordinates: latitude, longitude, name, altitude, timezone = location - weather = pvlib.iotools.get_pvgis_tmy(latitude, longitude)[0] - weather = weather.rename(columns=variables_translation) + weather = pvlib.iotools.get_pvgis_tmy(latitude, longitude, + map_variables=True)[0] weather.index.name = "utc_time" tmys.append(weather) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index fa7ed689b5..87c1ac1e93 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -49,6 +49,16 @@ Breaking changes :py:meth:`~pvlib.pvsystem.PVSystem.calcparams_desoto` and :py:meth:`~pvlib.pvsystem.PVSystem.calcparams_cec` (:issue:`1118`, :pull:`1222`) +* Switched the order of the outputs from the PSM3 iotools, notably + :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` + (:issue:`1245`, :pull:`1268`) + +* Changed the naming of the inputs ``startdate``/``enddate`` to ``start``/``end`` in + :py:func:`~pvlib.iotools.get_ecmwf_macc` + (:issue:`1245`, :pull:`1268`) + +* Changed the naming of the inputs ``lat``/``lon`` to ``latitude``/``longitude`` in + 
:py:func:`~pvlib.iotools.get_pvgis_tmy` (:issue:`1245`, :pull:`1268`) Deprecations ~~~~~~~~~~~~ @@ -111,6 +121,8 @@ Enhancements :func:`~pvlib.iotools.get_pvgis_hourly` for reading and retrieving hourly solar radiation data and PV power output from PVGIS. (:pull:`1186`, :issue:`849`) +* Added ``map_variables`` option to :func:`~pvlib.iotools.get_pvgis_tmy` and + :func:`~pvlib.iotools.read_pvgis_tmy` (:issue:`1250`, :pull:`1268`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` for retrieving and reading BSRN solar radiation data files. (:pull:`1254`, :pull:`1145`, :issue:`1015`) diff --git a/pvlib/iotools/bsrn.py b/pvlib/iotools/bsrn.py index c9c3abb93d..4c02c10f90 100644 --- a/pvlib/iotools/bsrn.py +++ b/pvlib/iotools/bsrn.py @@ -57,10 +57,11 @@ def _empty_dataframe_from_logical_records(logical_records): columns = [] for lr in logical_records: columns += BSRN_COLUMNS[lr][2:] + columns = [c for c in columns if c != 'empty'] return pd.DataFrame(columns=columns) -def get_bsrn(start, end, station, username, password, +def get_bsrn(station, start, end, username, password, logical_records=('0100',), local_path=None): """ Retrieve ground measured irradiance data from the BSRN FTP server. 
@@ -73,12 +74,12 @@ def get_bsrn(start, end, station, username, password, Parameters ---------- + station: str + 3-letter BSRN station abbreviation start: datetime-like First day of the requested period end: datetime-like Last day of the requested period - station: str - 3-letter BSRN station abbreviation username: str username for accessing the BSRN FTP server password: str diff --git a/pvlib/iotools/ecmwf_macc.py b/pvlib/iotools/ecmwf_macc.py index fc08eea35f..fb42454ee3 100644 --- a/pvlib/iotools/ecmwf_macc.py +++ b/pvlib/iotools/ecmwf_macc.py @@ -34,12 +34,12 @@ def ECMWFDataServer(*a, **kw): } -def _ecmwf(server, startdate, stopdate, params, targetname): +def _ecmwf(server, startdate, enddate, params, targetname): # see http://apps.ecmwf.int/datasets/data/macc-reanalysis/levtype=sfc/ server.retrieve({ "class": "mc", "dataset": "macc", - "date": "%s/to/%s" % (startdate, stopdate), + "date": "%s/to/%s" % (startdate, enddate), "expver": "rean", "grid": "0.75/0.75", "levtype": "sfc", @@ -53,7 +53,7 @@ def _ecmwf(server, startdate, stopdate, params, targetname): }) -def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, +def get_ecmwf_macc(filename, params, start, end, lookup_params=True, server=None, target=_ecmwf): """ Download data from ECMWF MACC Reanalysis API. @@ -64,9 +64,9 @@ def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, full path of file where to save data, ``.nc`` appended if not given params : str or sequence of str keynames of parameter[s] to download - startdate : datetime.datetime or datetime.date + start : datetime.datetime or datetime.date UTC date - stopdate : datetime.datetime or datetime.date + end : datetime.datetime or datetime.date UTC date lookup_params : bool, default True optional flag, if ``False``, then codes are already formatted @@ -137,7 +137,7 @@ def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, :func:`pvlib.iotools.get_ecmwf_macc`. 
:: - target(server, startdate, stopdate, params, filename) -> None + target(server, startdate, enddate, params, filename) -> None Examples -------- @@ -161,12 +161,12 @@ def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, params = '/'.join(PARAMS.get(p) for p in params) except TypeError: params = PARAMS.get(params) - startdate = startdate.strftime('%Y-%m-%d') - stopdate = stopdate.strftime('%Y-%m-%d') + startdate = start.strftime('%Y-%m-%d') + enddate = end.strftime('%Y-%m-%d') if not server: server = ECMWFDataServer() t = threading.Thread(target=target, daemon=True, - args=(server, startdate, stopdate, params, filename)) + args=(server, startdate, enddate, params, filename)) t.start() return t @@ -191,8 +191,8 @@ def __init__(self, filename): # time resolution in hours self.time_size = self.data.dimensions['time'].size self.start_time = self.data['time'][0] - self.stop_time = self.data['time'][-1] - self.time_range = self.stop_time - self.start_time + self.end_time = self.data['time'][-1] + self.time_range = self.end_time - self.start_time self.delta_time = self.time_range / (self.time_size - 1) def get_nearest_indices(self, latitude, longitude): @@ -281,7 +281,7 @@ def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None): longitude : float longitude in degrees utc_time_range : sequence of datetime.datetime - pair of start and stop naive or UTC date-times + pair of start and end naive or UTC date-times Returns ------- @@ -295,9 +295,9 @@ def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None): if utc_time_range: start_idx = netCDF4.date2index( utc_time_range[0], nctime, select='before') - stop_idx = netCDF4.date2index( + end_idx = netCDF4.date2index( utc_time_range[-1], nctime, select='after') - time_slice = slice(start_idx, stop_idx + 1) + time_slice = slice(start_idx, end_idx + 1) else: time_slice = slice(0, ecmwf_macc.time_size) times = netCDF4.num2date(nctime[time_slice], nctime.units) diff --git 
a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index 758884160a..44fba674b1 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -28,6 +28,11 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, Retrieve NSRDB PSM3 timeseries weather data from the PSM3 API. The NSRDB is described in [1]_ and the PSM3 API is described in [2]_, [3]_, and [4]_. + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. + Parameters ---------- latitude : float or int @@ -61,11 +66,11 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, Returns ------- - headers : dict - metadata from NREL PSM3 about the record, see - :func:`pvlib.iotools.parse_psm3` for fields data : pandas.DataFrame timeseries data from NREL PSM3 + metadata : dict + metadata from NREL PSM3 about the record, see + :func:`pvlib.iotools.parse_psm3` for fields Raises ------ @@ -170,6 +175,11 @@ def parse_psm3(fbuf): Parse an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. + Parameters ---------- fbuf: file-like object @@ -177,15 +187,18 @@ def parse_psm3(fbuf): Returns ------- - headers : dict - metadata from NREL PSM3 about the record, see notes for fields data : pandas.DataFrame timeseries data from NREL PSM3 + metadata : dict + metadata from NREL PSM3 about the record, see notes for fields Notes ----- - The return is a tuple with two items. 
The first item is a header with - metadata from NREL PSM3 about the record containing the following fields: + The return is a tuple with two items. The first item is a dataframe with + the PSM3 timeseries data. + + The second item is a dictionary with metadata from NREL PSM3 about the + record containing the following fields: * Source * Location ID @@ -234,13 +247,11 @@ def parse_psm3(fbuf): * Surface Albedo Units * Version - The second item is a dataframe with the PSM3 timeseries data. - Examples -------- >>> # Read a local PSM3 file: >>> with open(filename, 'r') as f: # doctest: +SKIP - ... metadata, df = iotools.parse_psm3(f) # doctest: +SKIP + ... df, metadata = iotools.parse_psm3(f) # doctest: +SKIP See Also -------- @@ -254,17 +265,17 @@ def parse_psm3(fbuf): `_ """ # The first 2 lines of the response are headers with metadata - header_fields = fbuf.readline().split(',') - header_fields[-1] = header_fields[-1].strip() # strip trailing newline - header_values = fbuf.readline().split(',') - header_values[-1] = header_values[-1].strip() # strip trailing newline - header = dict(zip(header_fields, header_values)) - # the response is all strings, so set some header types to numbers - header['Local Time Zone'] = int(header['Local Time Zone']) - header['Time Zone'] = int(header['Time Zone']) - header['Latitude'] = float(header['Latitude']) - header['Longitude'] = float(header['Longitude']) - header['Elevation'] = int(header['Elevation']) + metadata_fields = fbuf.readline().split(',') + metadata_fields[-1] = metadata_fields[-1].strip() # strip trailing newline + metadata_values = fbuf.readline().split(',') + metadata_values[-1] = metadata_values[-1].strip() # strip trailing newline + metadata = dict(zip(metadata_fields, metadata_values)) + # the response is all strings, so set some metadata types to numbers + metadata['Local Time Zone'] = int(metadata['Local Time Zone']) + metadata['Time Zone'] = int(metadata['Time Zone']) + metadata['Latitude'] = 
float(metadata['Latitude']) + metadata['Longitude'] = float(metadata['Longitude']) + metadata['Elevation'] = int(metadata['Elevation']) # get the column names so we can set the dtypes columns = fbuf.readline().split(',') columns[-1] = columns[-1].strip() # strip trailing newline @@ -282,10 +293,10 @@ def parse_psm3(fbuf): dtidx = pd.to_datetime( data[['Year', 'Month', 'Day', 'Hour', 'Minute']]) # in USA all timezones are integers - tz = 'Etc/GMT%+d' % -header['Time Zone'] + tz = 'Etc/GMT%+d' % -metadata['Time Zone'] data.index = pd.DatetimeIndex(dtidx).tz_localize(tz) - return header, data + return data, metadata def read_psm3(filename): @@ -293,6 +304,11 @@ def read_psm3(filename): Read an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. 
+ Parameters ---------- filename: str @@ -300,11 +316,11 @@ def read_psm3(filename): Returns ------- - headers : dict - metadata from NREL PSM3 about the record, see - :func:`pvlib.iotools.parse_psm3` for fields data : pandas.DataFrame timeseries data from NREL PSM3 + metadata : dict + metadata from NREL PSM3 about the record, see + :func:`pvlib.iotools.parse_psm3` for fields See Also -------- diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index d43d4db87e..3bb2f977c6 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -20,6 +20,8 @@ import requests import pandas as pd from pvlib.iotools import read_epw, parse_epw +import warnings +from pvlib._deprecation import pvlibDeprecationWarning URL = 'https://re.jrc.ec.europa.eu/api/' @@ -363,19 +365,19 @@ def read_pvgis_hourly(filename, pvgis_format=None, map_variables=True): raise ValueError(err_msg) -def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, +def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, userhorizon=None, startyear=None, endyear=None, url=URL, - timeout=30): + map_variables=None, timeout=30): """ Get TMY data from PVGIS. For more information see the PVGIS [1]_ TMY tool documentation [2]_. Parameters ---------- - lat : float + latitude : float Latitude in degrees north - lon : float - Longitude in dgrees east + longitude : float + Longitude in degrees east outputformat : str, default 'json' Must be in ``['csv', 'basic', 'epw', 'json']``. See PVGIS TMY tool documentation [2]_ for more info. @@ -392,6 +394,9 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, last year to calculate TMY, must be at least 10 years from first year url : str, default :const:`pvlib.iotools.pvgis.URL` base url of PVGIS API, append ``tmy`` to get TMY endpoint + map_variables: bool + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable PVGIS_VARIABLE_MAP. 
timeout : int, default 30 time in seconds to wait for server response before timeout @@ -403,8 +408,8 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, TMY year for each month, ``None`` for basic and EPW inputs : dict the inputs, ``None`` for basic and EPW - meta : list or dict - meta data, ``None`` for basic + metadata : list or dict + file metadata, ``None`` for basic Raises ------ @@ -426,7 +431,7 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, `_ """ # use requests to format the query string by passing params dictionary - params = {'lat': lat, 'lon': lon, 'outputformat': outputformat} + params = {'lat': latitude, 'lon': longitude, 'outputformat': outputformat} # pvgis only likes 0 for False, and 1 for True, not strings, also the # default for usehorizon is already 1 (ie: True), so only set if False if not usehorizon: @@ -452,22 +457,34 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, data = None, None, None, None if outputformat == 'json': src = res.json() - return _parse_pvgis_tmy_json(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_json(src) elif outputformat == 'csv': with io.BytesIO(res.content) as src: - data = _parse_pvgis_tmy_csv(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_csv(src) elif outputformat == 'basic': with io.BytesIO(res.content) as src: - data = _parse_pvgis_tmy_basic(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_basic(src) elif outputformat == 'epw': with io.StringIO(res.content.decode('utf-8')) as src: data, meta = parse_epw(src) - data = (data, None, None, meta) + months_selected, inputs = None, None else: # this line is never reached because if outputformat is not valid then # the response is HTTP/1.1 400 BAD REQUEST which is handled earlier pass - return data + + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. 
Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) + + return data, months_selected, inputs, meta def _parse_pvgis_tmy_json(src): @@ -521,7 +538,7 @@ def _parse_pvgis_tmy_basic(src): return data, None, None, None -def read_pvgis_tmy(filename, pvgis_format=None): +def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): """ Read a file downloaded from PVGIS. @@ -537,6 +554,10 @@ def read_pvgis_tmy(filename, pvgis_format=None): ``outputformat='basic'``, please set `pvgis_format` to ``'basic'``. If `filename` is a buffer, then `pvgis_format` is required and must be in ``['csv', 'epw', 'json', 'basic']``. + map_variables: bool + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable PVGIS_VARIABLE_MAP. + Returns ------- @@ -546,8 +567,8 @@ def read_pvgis_tmy(filename, pvgis_format=None): TMY year for each month, ``None`` for basic and EPW inputs : dict the inputs, ``None`` for basic and EPW - meta : list or dict - meta data, ``None`` for basic + metadata : list or dict + file metadata, ``None`` for basic Raises ------ @@ -570,7 +591,6 @@ def read_pvgis_tmy(filename, pvgis_format=None): outputformat = Path(filename).suffix[1:].lower() else: outputformat = pvgis_format - # parse the pvgis file based on the output format, either 'epw', 'json', # 'csv', or 'basic' @@ -580,7 +600,7 @@ def read_pvgis_tmy(filename, pvgis_format=None): data, meta = parse_epw(filename) except AttributeError: # str/path has no .read() attribute data, meta = read_epw(filename) - return data, None, None, meta + months_selected, inputs = None, None # NOTE: json, csv, and basic output formats have parsers defined as private # functions in this module @@ -588,30 +608,44 @@ def read_pvgis_tmy(filename, pvgis_format=None): # JSON: use Python built-in json 
module to convert file contents to a # Python dictionary, and pass the dictionary to the _parse_pvgis_tmy_json() # function from this module - if outputformat == 'json': + elif outputformat == 'json': try: src = json.load(filename) except AttributeError: # str/path has no .read() attribute with open(str(filename), 'r') as fbuf: src = json.load(fbuf) - return _parse_pvgis_tmy_json(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_json(src) # CSV or basic: use the correct parser from this module # eg: _parse_pvgis_tmy_csv() or _parse_pvgist_tmy_basic() - if outputformat in ['csv', 'basic']: + elif outputformat in ['csv', 'basic']: # get the correct parser function for this output format from globals() pvgis_parser = globals()['_parse_pvgis_tmy_{:s}'.format(outputformat)] # NOTE: pvgis_parse() is a pvgis parser function from this module, # either _parse_pvgis_tmy_csv() or _parse_pvgist_tmy_basic() try: - pvgis_data = pvgis_parser(filename) + data, months_selected, inputs, meta = pvgis_parser(filename) except AttributeError: # str/path has no .read() attribute with open(str(filename), 'rb') as fbuf: - pvgis_data = pvgis_parser(fbuf) - return pvgis_data + data, months_selected, inputs, meta = pvgis_parser(fbuf) + + else: + # raise exception if pvgis format isn't in ['csv','basic','epw','json'] + err_msg = ( + "pvgis format '{:s}' was unknown, must be either 'epw', 'json', " + "'csv', or 'basic'").format(outputformat) + raise ValueError(err_msg) + + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. 
Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) + + return data, months_selected, inputs, meta - # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] - err_msg = ( - "pvgis format '{:s}' was unknown, must be either 'epw', 'json', 'csv'" - ", or 'basic'").format(outputformat) - raise ValueError(err_msg) diff --git a/pvlib/iotools/sodapro.py b/pvlib/iotools/sodapro.py index a27e6f1423..68fa82a396 100644 --- a/pvlib/iotools/sodapro.py +++ b/pvlib/iotools/sodapro.py @@ -41,7 +41,7 @@ '0 year 1 month 0 day 0 h 0 min 0 s': '1M'} -def get_cams(start, end, latitude, longitude, email, identifier='mcclear', +def get_cams(latitude, longitude, start, end, email, identifier='mcclear', altitude=None, time_step='1h', time_ref='UT', verbose=False, integrated=False, label=None, map_variables=True, server='www.soda-is.com', timeout=30): @@ -62,19 +62,19 @@ def get_cams(start, end, latitude, longitude, email, identifier='mcclear', Parameters ---------- - start: datetime like - First day of the requested period - end: datetime like - Last day of the requested period latitude: float in decimal degrees, between -90 and 90, north is positive (ISO 19115) longitude : float in decimal degrees, between -180 and 180, east is positive (ISO 19115) + start: datetime like + First day of the requested period + end: datetime like + Last day of the requested period email: str Email address linked to a SoDa account identifier: {'mcclear', 'cams_radiation'} Specify whether to retrieve CAMS Radiation or McClear parameters - altitude: float, default: None + altitude: float, optional Altitude in meters. 
If None, then the altitude is determined from the NASA SRTM database time_step: str, {'1min', '15min', '1h', '1d', '1M'}, default: '1h' @@ -96,7 +96,7 @@ def get_cams(start, end, latitude, longitude, email, identifier='mcclear', where applicable. See variable CAMS_VARIABLE_MAP. server: str, default: 'www.soda-is.com' Main server (www.soda-is.com) or backup mirror server (pro.soda-is.com) - timeout : int, default 30 + timeout : int, default: 30 Time in seconds to wait for server response before timeout Returns diff --git a/pvlib/tests/iotools/test_psm3.py b/pvlib/tests/iotools/test_psm3.py index ca3a5e3034..0b02c3c291 100644 --- a/pvlib/tests/iotools/test_psm3.py +++ b/pvlib/tests/iotools/test_psm3.py @@ -17,7 +17,7 @@ YEAR_TEST_DATA_5MIN = DATA_DIR / 'test_psm3_2019_5min.csv' MANUAL_TEST_DATA = DATA_DIR / 'test_read_psm3.csv' LATITUDE, LONGITUDE = 40.5137, -108.5449 -HEADER_FIELDS = [ +METADATA_FIELDS = [ 'Source', 'Location ID', 'City', 'State', 'Country', 'Latitude', 'Longitude', 'Time Zone', 'Elevation', 'Local Time Zone', 'Dew Point Units', 'DHI Units', 'DNI Units', 'GHI Units', @@ -46,7 +46,7 @@ def nrel_api_key(): return demo_key -def assert_psm3_equal(header, data, expected): +def assert_psm3_equal(data, metadata, expected): """check consistency of PSM3 data""" # check datevec columns assert np.allclose(data.Year, expected.Year) @@ -65,48 +65,48 @@ def assert_psm3_equal(header, data, expected): assert np.allclose(data['Wind Speed'], expected['Wind Speed']) assert np.allclose(data['Wind Direction'], expected['Wind Direction']) # check header - for hf in HEADER_FIELDS: - assert hf in header + for mf in METADATA_FIELDS: + assert mf in metadata # check timezone - assert (data.index.tzinfo.zone == 'Etc/GMT%+d' % -header['Time Zone']) + assert (data.index.tzinfo.zone == 'Etc/GMT%+d' % -metadata['Time Zone']) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_tmy(nrel_api_key): """test get_psm3 with a TMY""" - 
header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='tmy-2017') + data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + PVLIB_EMAIL, names='tmy-2017') expected = pd.read_csv(TMY_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_singleyear(nrel_api_key): """test get_psm3 with a single year""" - header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2017', interval=30) + data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + PVLIB_EMAIL, names='2017', interval=30) expected = pd.read_csv(YEAR_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_5min(nrel_api_key): """test get_psm3 for 5-minute data""" - header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2019', interval=5) + data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + PVLIB_EMAIL, names='2019', interval=5) assert len(data) == 525600/5 first_day = data.loc['2019-01-01'] expected = pd.read_csv(YEAR_TEST_DATA_5MIN) - assert_psm3_equal(header, first_day, expected) + assert_psm3_equal(first_day, metadata, expected) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_check_leap_day(nrel_api_key): - _, data_2012 = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + data_2012, _ = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, PVLIB_EMAIL, names="2012", interval=60, leap_day=True) assert len(data_2012) == (8760 + 24) @@ -149,13 +149,13 @@ def io_input(request): def test_parse_psm3(io_input): """test parse_psm3""" - header, data = psm3.parse_psm3(io_input) + data, metadata = psm3.parse_psm3(io_input) expected = 
pd.read_csv(YEAR_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) def test_read_psm3(): """test read_psm3""" - header, data = psm3.read_psm3(MANUAL_TEST_DATA) + data, metadata = psm3.read_psm3(MANUAL_TEST_DATA) expected = pd.read_csv(YEAR_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index fc0638ed74..5a097d25e5 100644 --- a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -9,7 +9,9 @@ import requests from pvlib.iotools import get_pvgis_tmy, read_pvgis_tmy from pvlib.iotools import get_pvgis_hourly, read_pvgis_hourly -from ..conftest import DATA_DIR, RERUNS, RERUNS_DELAY, assert_frame_equal +from ..conftest import (DATA_DIR, RERUNS, RERUNS_DELAY, assert_frame_equal, + fail_on_pvlib_version) +from pvlib._deprecation import pvlibDeprecationWarning # PVGIS Hourly tests @@ -356,11 +358,28 @@ def csv_meta(meta_expected): in meta_expected['outputs']['tmy_hourly']['variables'].items()] +@pytest.fixture +def pvgis_tmy_mapped_columns(): + return ['temp_air', 'relative_humidity', 'ghi', 'dni', 'dhi', 'IR(h)', + 'wind_speed', 'wind_direction', 'pressure'] + + +@fail_on_pvlib_version('0.10') +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_pvgis_tmy_variable_map_deprecating_warning_0_10(): + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + _ = get_pvgis_tmy(45, 8) + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.epw' + _ = read_pvgis_tmy(fn) + + @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy(expected, month_year_expected, inputs_expected, meta_expected): - pvgis_data = get_pvgis_tmy(45, 8) + pvgis_data = get_pvgis_tmy(45, 8, map_variables=False) 
_compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) @@ -393,26 +412,28 @@ def _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_kwargs(userhorizon_expected): - _, _, inputs, _ = get_pvgis_tmy(45, 8, usehorizon=False) + _, _, inputs, _ = get_pvgis_tmy(45, 8, usehorizon=False, + map_variables=False) assert inputs['meteo_data']['use_horizon'] is False data, _, _, _ = get_pvgis_tmy( - 45, 8, userhorizon=[0, 10, 20, 30, 40, 15, 25, 5]) + 45, 8, userhorizon=[0, 10, 20, 30, 40, 15, 25, 5], map_variables=False) assert np.allclose( data['G(h)'], userhorizon_expected['G(h)'].values) assert np.allclose( data['Gb(n)'], userhorizon_expected['Gb(n)'].values) assert np.allclose( data['Gd(h)'], userhorizon_expected['Gd(h)'].values) - _, _, inputs, _ = get_pvgis_tmy(45, 8, startyear=2005) + _, _, inputs, _ = get_pvgis_tmy(45, 8, startyear=2005, map_variables=False) assert inputs['meteo_data']['year_min'] == 2005 - _, _, inputs, _ = get_pvgis_tmy(45, 8, endyear=2016) + _, _, inputs, _ = get_pvgis_tmy(45, 8, endyear=2016, map_variables=False) assert inputs['meteo_data']['year_max'] == 2016 @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_basic(expected, meta_expected): - pvgis_data = get_pvgis_tmy(45, 8, outputformat='basic') + pvgis_data = get_pvgis_tmy(45, 8, outputformat='basic', + map_variables=False) _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data) @@ -427,7 +448,7 @@ def _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data): @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta): - pvgis_data = get_pvgis_tmy(45, 8, outputformat='csv') + pvgis_data = get_pvgis_tmy(45, 8, outputformat='csv', map_variables=False) 
_compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) @@ -458,7 +479,7 @@ def _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_epw(expected, epw_meta): - pvgis_data = get_pvgis_tmy(45, 8, outputformat='epw') + pvgis_data = get_pvgis_tmy(45, 8, outputformat='epw', map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) @@ -481,19 +502,33 @@ def test_get_pvgis_tmy_error(): get_pvgis_tmy(45, 8, url='https://re.jrc.ec.europa.eu/') +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_get_pvgis_map_variables(pvgis_tmy_mapped_columns): + actual, _, _, _ = get_pvgis_tmy(45, 8, map_variables=True) + assert all([c in pvgis_tmy_mapped_columns for c in actual.columns]) + + +def test_read_pvgis_tmy_map_variables(pvgis_tmy_mapped_columns): + fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' + actual, _, _, _ = read_pvgis_tmy(fn, map_variables=True) + assert all([c in pvgis_tmy_mapped_columns for c in actual.columns]) + + def test_read_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' # infer outputformat from file extensions - pvgis_data = read_pvgis_tmy(fn) + pvgis_data = read_pvgis_tmy(fn, map_variables=False) _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='json') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='json', map_variables=False) _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) with fn.open('r') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='json') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='json', + map_variables=False) 
_compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) @@ -501,13 +536,14 @@ def test_read_pvgis_tmy_json(expected, month_year_expected, inputs_expected, def test_read_pvgis_tmy_epw(expected, epw_meta): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.epw' # infer outputformat from file extensions - pvgis_data = read_pvgis_tmy(fn) + pvgis_data = read_pvgis_tmy(fn, map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='epw') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='epw', map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) with fn.open('r') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='epw') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='epw', + map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) @@ -515,15 +551,16 @@ def test_read_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.csv' # infer outputformat from file extensions - pvgis_data = read_pvgis_tmy(fn) + pvgis_data = read_pvgis_tmy(fn, map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='csv') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='csv', map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) with fn.open('rb') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='csv') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='csv', + map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) @@ -532,20 +569,22 @@ def test_read_pvgis_tmy_basic(expected, meta_expected): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.txt' # XXX: 
can't infer outputformat from file extensions for basic with pytest.raises(ValueError, match="pvgis format 'txt' was unknown"): - read_pvgis_tmy(fn) + read_pvgis_tmy(fn, map_variables=False) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='basic') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='basic', map_variables=False) _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data) with fn.open('rb') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='basic') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='basic', + map_variables=False) _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data) # file buffer raises TypeError if passed to pathlib.Path() with pytest.raises(TypeError): - read_pvgis_tmy(fbuf) + read_pvgis_tmy(fbuf, map_variables=False) def test_read_pvgis_tmy_exception(): bad_outputformat = 'bad' err_msg = f"pvgis format '{bad_outputformat:s}' was unknown" with pytest.raises(ValueError, match=err_msg): - read_pvgis_tmy('filename', pvgis_format=bad_outputformat) + read_pvgis_tmy('filename', pvgis_format=bad_outputformat, + map_variables=False) diff --git a/pvlib/tests/test_modelchain.py b/pvlib/tests/test_modelchain.py index e52bda72bd..86b7411d0f 100644 --- a/pvlib/tests/test_modelchain.py +++ b/pvlib/tests/test_modelchain.py @@ -1778,7 +1778,7 @@ def test_ModelChain_no_extra_kwargs(sapm_dc_snl_ac_system, location): ModelChain(sapm_dc_snl_ac_system, location, arbitrary_kwarg='value') -@fail_on_pvlib_version('1.0') +@fail_on_pvlib_version('0.10') def test_ModelChain_attributes_deprecated_10(sapm_dc_snl_ac_system, location): match = 'Use ModelChain.results' mc = ModelChain(sapm_dc_snl_ac_system, location)