From d7deb80cdc5d1b63de5b2865a0c5cf24d4655fc1 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 22 Feb 2021 22:14:15 +0100 Subject: [PATCH 01/35] Add cams.get_cams_radiation function --- docs/sphinx/source/api.rst | 1 + docs/sphinx/source/whatsnew/v0.9.0.rst | 3 + pvlib/iotools/__init__.py | 1 + pvlib/iotools/cams.py | 207 +++++++++++++++++++++++++ 4 files changed, 212 insertions(+) create mode 100644 pvlib/iotools/cams.py diff --git a/docs/sphinx/source/api.rst b/docs/sphinx/source/api.rst index 8805d199a4..31204b6f0d 100644 --- a/docs/sphinx/source/api.rst +++ b/docs/sphinx/source/api.rst @@ -484,6 +484,7 @@ relevant to solar energy modeling. iotools.get_pvgis_tmy iotools.read_pvgis_tmy iotools.read_bsrn + iotools.get_cams_mcclear A :py:class:`~pvlib.location.Location` object may be created from metadata in some files. diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 81e7a0c60b..a4e2688bb0 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -64,6 +64,9 @@ Enhancements ~~~~~~~~~~~~ * Add :func:`~pvlib.iotools.read_bsrn` for reading BSRN solar radiation data files. (:pull:`1145`, :issue:`1015`) +* Add :func:`~pvlib.iotools.get_cams_radiation` for retrieving CAMS McClear + clear-sky radiation time series. + files. (:pull:`1145`, :issue:`1015`) * In :py:class:`~pvlib.modelchain.ModelChain`, attributes which contain output of models are now collected into ``ModelChain.results``. (:pull:`1076`, :issue:`1067`) diff --git a/pvlib/iotools/__init__.py b/pvlib/iotools/__init__.py index ba5d5e8807..737ee66d4d 100644 --- a/pvlib/iotools/__init__.py +++ b/pvlib/iotools/__init__.py @@ -14,3 +14,4 @@ from pvlib.iotools.psm3 import parse_psm3 # noqa: F401 from pvlib.iotools.pvgis import get_pvgis_tmy, read_pvgis_tmy # noqa: F401 from pvlib.iotools.bsrn import read_bsrn # noqa: F401 +from pvlib.iotools.cams import get_cams_radiation # noqa: F401 diff --git a/pvlib/iotools/cams.py b/pvlib/iotools/cams.py new file mode 100644 index 0000000000..c802420623 --- /dev/null +++ b/pvlib/iotools/cams.py @@ -0,0 +1,207 @@ +"""Functions to access data from Copernicus Atmosphere Monitoring Service + (CAMS) radiation service. +.. codeauthor:: Adam R. Jensen +""" + +import pandas as pd +import requests +import io + + +MCCLEAR_COLUMNS = ['Observation period', 'TOA', 'Clear sky GHI', + 'Clear sky BHI', 'Clear sky DHI', 'Clear sky BNI'] + +MCCLEAR_VERBOSE_COLUMNS = ['sza', 'summer/winter split', 'tco3', 'tcwv', + 'AOD BC', 'AOD DU', 'AOD SS', 'AOD OR', 'AOD SU', + 'AOD NI', 'AOD AM', 'alpha', 'Aerosol type', + 'fiso', 'fvol', 'fgeo', 'albedo'] + +# Dictionary mapping CAMS MCCLEAR variables to pvlib names +MCCLEAR_VARIABLE_MAP = { + 'TOA': 'ghi_extra', + 'Clear sky GHI': 'ghi_clear', + 'Clear sky BHI': 'bhi_clear', + 'Clear sky DHI': 'dhi_clear', + 'Clear sky BNI': 'dni_clear', + 'sza': 'solar_zenith', +} + + +# Dictionary mapping Python time steps to CAMS time step format +TIME_STEPS = {'1min': 'PT01M', '15min': 'PT15M', '1h': 'PT01H', '1d': 'P01D', + '1M': 'P01M'} + +TIME_STEPS_HOURS = {'1min': 1/60, '15min': 15/60, '1h': 1, '1d': 24} + + +def get_cams_mcclear(start_date, end_date, latitude, longitude, email, + altitude=None, time_step='1h', time_ref='UT', + integrated=False, label=None, verbose=False, + map_variables=True, server='www.soda-is.com'): + """ + Retrieve time-series of clear-sky global, beam, and diffuse radiation + anywhere in the world from CAMS McClear [1]_ using the WGET service [2]_. 
+
+
+    Geographical coverage: wordwide
+    Time coverage: 2004-01-01 to two days ago
+    Access: free, but requires registration, see [1]_
+    Requests: max. 100 per day
+
+
+    Parameters
+    ----------
+    start_date: datetime like
+        First day of the requested period
+    end_date: datetime like
+        Last day of the requested period
+    latitude: float
+        in decimal degrees, between -90 and 90, north is positive (ISO 19115)
+    longitude : float
+        in decimal degrees, between -180 and 180, east is positive (ISO 19115)
+    altitude: float, default: None
+        Altitude in meters. If None, then the altitude is determined from the
+        NASA SRTM database
+    email: str
+        Email address linked to a SoDa account
+    time_step: str, {'1min', '15min', '1h', '1d', '1M'}, default: '1h'
+        Time step of the time series, either 1 minute, 15 minute, hourly,
+        daily, or monthly.
+    time_reference: str, {'UT', 'TST'}, default: 'UT'
+        'UT' (universal time) or 'TST' (True Solar Time)
+    integrated: boolean, default False
+        Whether to return integrated irradiation values (Wh/m^2) from CAMS or
+        average irradiance values (W/m^2) as is more commonly used
+    label: {‘right’, ‘left’}, default: None
+        Which bin edge label to label bucket with. The default is ‘left’ for
+        all frequency offsets except for ‘M’ which has a default of ‘right’.
+    verbose: boolean, default: False
+        Verbose mode outputs additional parameters (aerosols). Only avaiable
+        for 1 minute and universal time. See [1] for parameter description.
+    map_variables: bool, default: True
+        When true, renames columns of the Dataframe to pvlib variable names
+        where applicable. See variable MCCLEAR_VARIABLE_MAP.
+    server: str, default: 'www.soda-is.com'
+        Main server (www.soda-is.com) or backup mirror server (pro.soda-is.com)
+
+
+    Notes
+    ----------
+    The returned data Dataframe includes the following fields:
+
+    ========================  ======  ==========================================
+    Key, mapped key           Format  Description
+    ========================  ======  ==========================================
+    **Mapped field names are returned when the map_variables argument is True**
+    --------------------------------------------------------------------------
+    Observation period        str     Beginning/end of time period
+    TOA, ghi_extra            float   Horizontal radiation at top of atmosphere
+    Clear sky GHI, ghi_clear  float   Clear sky global radiation on horizontal
+    Clear sky BHI, bhi_clear  float   Clear sky beam radiation on horizontal
+    Clear sky DHI, dhi_clear  float   Clear sky diffuse radiation on horizontal
+    Clear sky BNI, dni_clear  float   Clear sky beam radiation normal to sun
+    ========================  ======  ==========================================
+
+    For the returned units see the integrated argument. For description of
+    additional output parameters in verbose mode, see [1].
+
+    Note that it is recommended to specify the latitude and longitude to at
+    least the fourth decimal place.
+
+    Variables corresponding to standard pvlib variables are renamed,
+    e.g. `sza` becomes `solar_zenith`. See the
+    `pvlib.iotools.cams.MCCLEAR_VARIABLE_MAP` dict for the complete mapping.
+
+
+    References
+    ----------
+    .. [1] `CAMS McClear Service Info `_
+    ..
[2] `CAMS McClear Automatic Access + `_ + """ + + if time_step in TIME_STEPS.keys(): + time_step_str = TIME_STEPS[time_step] + else: + print('WARNING: time step not recognized, 1 hour time step used!') + time_step_str = 'PT01H' + + names = MCCLEAR_COLUMNS + if verbose: + if (time_step == '1min') & (time_ref == 'UT'): + names += MCCLEAR_VERBOSE_COLUMNS + else: + verbose = False + print("Verbose mode only supports 1 min. UT time series!") + + if altitude is None: # Let SoDa get elevation from the NASA SRTM database + altitude = -999 + + # Start and end date should be in the format: yyyy-mm-dd + start_date = start_date.strftime('%Y-%m-%d') + end_date = end_date.strftime('%Y-%m-%d') + + email = email.replace('@', '%2540') # Format email address + + # Format verbose variable to the required format: {'true', 'false'} + verbose = str(verbose).lower() + + # Manual format the request url, due to uncommon usage of & and ; in url + url = ("http://{}/service/wps?Service=WPS&Request=Execute&" + "Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation&" + "DataInputs=latitude={};longitude={};altitude={};" + "date_begin={};date_end={};time_ref={};summarization={};" + "username={};verbose={}" + ).format(server, latitude, longitude, altitude, start_date, + end_date, time_ref, time_step_str, email, verbose) + + res = requests.get(url) + + # Invalid requests returns helpful XML error message + if res.headers['Content-Type'] == 'application/xml': + print('REQUEST ERROR MESSAGE:') + print(res.text.split('ows:ExceptionText')[1][1:-2]) + + # Check if returned file is a csv data file + elif res.headers['Content-Type'] == 'application/csv': + data = pd.read_csv(io.StringIO(res.content.decode('utf-8')), sep=';', + comment='#', header=None, names=names) + + obs_period = data['Observation period'].str.split('/') + + # Set index as the start observation time (left) and localize to UTC + if (label == 'left') | ((label is None) & (time_step != '1M')): + data.index = pd.to_datetime(obs_period.str[0], utc=True) + # Set index as the stop observation time (right) and localize to UTC + elif (label == 'right') | ((label is None) & (time_step == '1M')): + data.index = pd.to_datetime(obs_period.str[1], utc=True) + + data.index.name = None # Set index name to None + + # Change index for '1d' and '1M' to be date and not datetime + if time_step == '1d': + data.index = data.index.date + elif (time_step == '1M') & (label is not None): + data.index = data.index.date + # For monthly data with 'right' label, the index should be the last + # date of the month and not the first date of the following month + elif (time_step == '1M') & (time_step != 'left'): + data.index = data.index.date - pd.Timestamp(days=1) + + if not integrated: # Convert from Wh/m2 to W/m2 + integrated_cols = MCCLEAR_COLUMNS[1:6] + + if time_step == '1M': + time_delta = (pd.to_datetime(obs_period.str[1]) + - pd.to_datetime(obs_period.str[0])) + hours = time_delta.dt.total_seconds()/60/60 + data[integrated_cols] = data[integrated_cols] / hours + else: + data[integrated_cols] = (data[integrated_cols] / + TIME_STEPS_HOURS[time_step]) + + if map_variables: + data = data.rename(columns=MCCLEAR_VARIABLE_MAP) + + return data From 510f08ef8b2d0ee543c197a1433c6294ce410cde Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 22 Feb 2021 22:14:29 +0100 Subject: [PATCH 02/35] Revert "Add cams.get_cams_radiation function" This reverts commit d7deb80cdc5d1b63de5b2865a0c5cf24d4655fc1. 
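The functionality added and reverted in these first two patches lands later in the series as pvlib.iotools.get_cams (sodapro.py), with latitude/longitude placed before start/end as enforced in PATCH 03. A minimal retrieval sketch under that later signature follows; the coordinates, dates, and email address are placeholders, and the (data, metadata) return order is assumed to follow the pattern the rest of the series establishes for the iotools functions:

    import pandas as pd
    import pvlib

    # Clear-sky irradiance from the CAMS McClear service via SoDa.
    # 'user@example.com' must be replaced by the email of a registered
    # SoDa account; coordinates and dates are arbitrary examples.
    data, metadata = pvlib.iotools.get_cams(
        latitude=55.7, longitude=12.5,
        start=pd.Timestamp('2020-06-01'), end=pd.Timestamp('2020-06-07'),
        email='user@example.com',
        identifier='mcclear',   # McClear clear-sky product
        time_step='1h',         # hourly values
        map_variables=True)     # e.g. 'Clear sky GHI' -> 'ghi_clear'

    print(data[['ghi_clear', 'dni_clear', 'dhi_clear']].head())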
--- docs/sphinx/source/api.rst | 1 - docs/sphinx/source/whatsnew/v0.9.0.rst | 3 - pvlib/iotools/__init__.py | 1 - pvlib/iotools/cams.py | 207 ------------------------- 4 files changed, 212 deletions(-) delete mode 100644 pvlib/iotools/cams.py diff --git a/docs/sphinx/source/api.rst b/docs/sphinx/source/api.rst index 31204b6f0d..8805d199a4 100644 --- a/docs/sphinx/source/api.rst +++ b/docs/sphinx/source/api.rst @@ -484,7 +484,6 @@ relevant to solar energy modeling. iotools.get_pvgis_tmy iotools.read_pvgis_tmy iotools.read_bsrn - iotools.get_cams_mcclear A :py:class:`~pvlib.location.Location` object may be created from metadata in some files. diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index a4e2688bb0..81e7a0c60b 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -64,9 +64,6 @@ Enhancements ~~~~~~~~~~~~ * Add :func:`~pvlib.iotools.read_bsrn` for reading BSRN solar radiation data files. (:pull:`1145`, :issue:`1015`) -* Add :func:`~pvlib.iotools.get_cams_radiation` for retrieving CAMS McClear - clear-sky radiation time series. - files. (:pull:`1145`, :issue:`1015`) * In :py:class:`~pvlib.modelchain.ModelChain`, attributes which contain output of models are now collected into ``ModelChain.results``. (:pull:`1076`, :issue:`1067`) diff --git a/pvlib/iotools/__init__.py b/pvlib/iotools/__init__.py index 737ee66d4d..ba5d5e8807 100644 --- a/pvlib/iotools/__init__.py +++ b/pvlib/iotools/__init__.py @@ -14,4 +14,3 @@ from pvlib.iotools.psm3 import parse_psm3 # noqa: F401 from pvlib.iotools.pvgis import get_pvgis_tmy, read_pvgis_tmy # noqa: F401 from pvlib.iotools.bsrn import read_bsrn # noqa: F401 -from pvlib.iotools.cams import get_cams_radiation # noqa: F401 diff --git a/pvlib/iotools/cams.py b/pvlib/iotools/cams.py deleted file mode 100644 index c802420623..0000000000 --- a/pvlib/iotools/cams.py +++ /dev/null @@ -1,207 +0,0 @@ -"""Functions to access data from Copernicus Atmosphere Monitoring Service - (CAMS) radiation service. -.. codeauthor:: Adam R. Jensen -""" - -import pandas as pd -import requests -import io - - -MCCLEAR_COLUMNS = ['Observation period', 'TOA', 'Clear sky GHI', - 'Clear sky BHI', 'Clear sky DHI', 'Clear sky BNI'] - -MCCLEAR_VERBOSE_COLUMNS = ['sza', 'summer/winter split', 'tco3', 'tcwv', - 'AOD BC', 'AOD DU', 'AOD SS', 'AOD OR', 'AOD SU', - 'AOD NI', 'AOD AM', 'alpha', 'Aerosol type', - 'fiso', 'fvol', 'fgeo', 'albedo'] - -# Dictionary mapping CAMS MCCLEAR variables to pvlib names -MCCLEAR_VARIABLE_MAP = { - 'TOA': 'ghi_extra', - 'Clear sky GHI': 'ghi_clear', - 'Clear sky BHI': 'bhi_clear', - 'Clear sky DHI': 'dhi_clear', - 'Clear sky BNI': 'dni_clear', - 'sza': 'solar_zenith', -} - - -# Dictionary mapping Python time steps to CAMS time step format -TIME_STEPS = {'1min': 'PT01M', '15min': 'PT15M', '1h': 'PT01H', '1d': 'P01D', - '1M': 'P01M'} - -TIME_STEPS_HOURS = {'1min': 1/60, '15min': 15/60, '1h': 1, '1d': 24} - - -def get_cams_mcclear(start_date, end_date, latitude, longitude, email, - altitude=None, time_step='1h', time_ref='UT', - integrated=False, label=None, verbose=False, - map_variables=True, server='www.soda-is.com'): - """ - Retrieve time-series of clear-sky global, beam, and diffuse radiation - anywhere in the world from CAMS McClear [1]_ using the WGET service [2]_. - - - Geographical coverage: wordwide - Time coverage: 2004-01-01 to two days ago - Access: free, but requires registration, see [1]_ - Requests: max. 
100 per day - - - Parameters - ---------- - start_date: datetime like - First day of the requested period - end_date: datetime like - Last day of the requested period - latitude: float - in decimal degrees, between -90 and 90, north is positive (ISO 19115) - longitude : float - in decimal degrees, between -180 and 180, east is positive (ISO 19115) - altitude: float, default: None - Altitude in meters. If None, then the altitude is determined from the - NASA SRTM database - email: str - Email address linked to a SoDa account - time_step: str, {'1min', '15min', '1h', '1d', '1M'}, default: '1h' - Time step of the time series, either 1 minute, 15 minute, hourly, - daily, or monthly. - time_reference: str, {'UT', 'TST'}, default: 'UT' - 'UT' (universal time) or 'TST' (True Solar Time) - integrated: boolean, default False - Whether to return integrated irradiation values (Wh/m^2) from CAMS or - average irradiance values (W/m^2) as is more commonly used - label: {‘right’, ‘left’}, default: None - Which bin edge label to label bucket with. The default is ‘left’ for - all frequency offsets except for ‘M’ which has a default of ‘right’. - verbose: boolean, default: False - Verbose mode outputs additional parameters (aerosols). Only avaiable - for 1 minute and universal time. See [1] for parameter description. - map_variables: bool, default: True - When true, renames columns of the Dataframe to pvlib variable names - where applicable. See variable MCCLEAR_VARIABLE_MAP. - server: str, default: 'www.soda-is.com' - Main server (www.soda-is.com) or backup mirror server (pro.soda-is.com) - - - Notes - ---------- - The returned data Dataframe includes the following fields: - - ======================= ====== ========================================== - Key, mapped key Format Description - ======================= ====== ========================================== - **Mapped field names are returned when the map_variables argument is True** - -------------------------------------------------------------------------- - Observation period str Beginning/end of time period - TOA, ghi_extra float Horizontal radiation at top of atmosphere - Clear sky GHI, ghi_clear float Clear sky global radiation on horizontal - Clear sky BHI, bhi_clear float Clear sky beam radiation on horizontal - Clear sky DHI, dhi_clear float Clear sky diffuse radiation on horizontal - Clear sky BNI, dni_clear float Clear sky beam radiation normal to sun - ======================= ====== ========================================== - - For the returned units see the integrated argument. For description of - additional output parameters in verbose mode, see [1]. - - Note that it is recommended to specify the latitude and longitude to at - least the fourth decimal place. - - Variables corresponding to standard pvlib variables are renamed, - e.g. `sza` becomes `solar_zenith`. See the - `pvlib.iotools.cams.MCCLEAR_VARIABLE_MAP` dict for the complete mapping. - - - References - ---------- - .. [1] `CAMS McClear Service Info - `_ - .. [2] `CAMS McClear Automatic Access - `_ - """ - - if time_step in TIME_STEPS.keys(): - time_step_str = TIME_STEPS[time_step] - else: - print('WARNING: time step not recognized, 1 hour time step used!') - time_step_str = 'PT01H' - - names = MCCLEAR_COLUMNS - if verbose: - if (time_step == '1min') & (time_ref == 'UT'): - names += MCCLEAR_VERBOSE_COLUMNS - else: - verbose = False - print("Verbose mode only supports 1 min. 
UT time series!") - - if altitude is None: # Let SoDa get elevation from the NASA SRTM database - altitude = -999 - - # Start and end date should be in the format: yyyy-mm-dd - start_date = start_date.strftime('%Y-%m-%d') - end_date = end_date.strftime('%Y-%m-%d') - - email = email.replace('@', '%2540') # Format email address - - # Format verbose variable to the required format: {'true', 'false'} - verbose = str(verbose).lower() - - # Manual format the request url, due to uncommon usage of & and ; in url - url = ("http://{}/service/wps?Service=WPS&Request=Execute&" - "Identifier=get_mcclear&version=1.0.0&RawDataOutput=irradiation&" - "DataInputs=latitude={};longitude={};altitude={};" - "date_begin={};date_end={};time_ref={};summarization={};" - "username={};verbose={}" - ).format(server, latitude, longitude, altitude, start_date, - end_date, time_ref, time_step_str, email, verbose) - - res = requests.get(url) - - # Invalid requests returns helpful XML error message - if res.headers['Content-Type'] == 'application/xml': - print('REQUEST ERROR MESSAGE:') - print(res.text.split('ows:ExceptionText')[1][1:-2]) - - # Check if returned file is a csv data file - elif res.headers['Content-Type'] == 'application/csv': - data = pd.read_csv(io.StringIO(res.content.decode('utf-8')), sep=';', - comment='#', header=None, names=names) - - obs_period = data['Observation period'].str.split('/') - - # Set index as the start observation time (left) and localize to UTC - if (label == 'left') | ((label is None) & (time_step != '1M')): - data.index = pd.to_datetime(obs_period.str[0], utc=True) - # Set index as the stop observation time (right) and localize to UTC - elif (label == 'right') | ((label is None) & (time_step == '1M')): - data.index = pd.to_datetime(obs_period.str[1], utc=True) - - data.index.name = None # Set index name to None - - # Change index for '1d' and '1M' to be date and not datetime - if time_step == '1d': - data.index = data.index.date - elif (time_step == '1M') & (label is not None): - data.index = data.index.date - # For monthly data with 'right' label, the index should be the last - # date of the month and not the first date of the following month - elif (time_step == '1M') & (time_step != 'left'): - data.index = data.index.date - pd.Timestamp(days=1) - - if not integrated: # Convert from Wh/m2 to W/m2 - integrated_cols = MCCLEAR_COLUMNS[1:6] - - if time_step == '1M': - time_delta = (pd.to_datetime(obs_period.str[1]) - - pd.to_datetime(obs_period.str[0])) - hours = time_delta.dt.total_seconds()/60/60 - data[integrated_cols] = data[integrated_cols] / hours - else: - data[integrated_cols] = (data[integrated_cols] / - TIME_STEPS_HOURS[time_step]) - - if map_variables: - data = data.rename(columns=MCCLEAR_VARIABLE_MAP) - - return data From 8132943918287afff406ce0273d1fe710d807795 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 30 Jul 2021 19:50:27 +0200 Subject: [PATCH 03/35] Fix inconsistencies Enforce the usage of the following terminology in the iotools inputs: start/end, latitude/longitude, and metadata. 
Also, latitude/longitude should come before start/end --- pvlib/iotools/ecmwf_macc.py | 28 ++++++++++++++-------------- pvlib/iotools/psm3.py | 34 +++++++++++++++++----------------- pvlib/iotools/pvgis.py | 16 ++++++++-------- pvlib/iotools/sodapro.py | 14 +++++++------- 4 files changed, 46 insertions(+), 46 deletions(-) diff --git a/pvlib/iotools/ecmwf_macc.py b/pvlib/iotools/ecmwf_macc.py index fc08eea35f..fb42454ee3 100644 --- a/pvlib/iotools/ecmwf_macc.py +++ b/pvlib/iotools/ecmwf_macc.py @@ -34,12 +34,12 @@ def ECMWFDataServer(*a, **kw): } -def _ecmwf(server, startdate, stopdate, params, targetname): +def _ecmwf(server, startdate, enddate, params, targetname): # see http://apps.ecmwf.int/datasets/data/macc-reanalysis/levtype=sfc/ server.retrieve({ "class": "mc", "dataset": "macc", - "date": "%s/to/%s" % (startdate, stopdate), + "date": "%s/to/%s" % (startdate, enddate), "expver": "rean", "grid": "0.75/0.75", "levtype": "sfc", @@ -53,7 +53,7 @@ def _ecmwf(server, startdate, stopdate, params, targetname): }) -def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, +def get_ecmwf_macc(filename, params, start, end, lookup_params=True, server=None, target=_ecmwf): """ Download data from ECMWF MACC Reanalysis API. @@ -64,9 +64,9 @@ def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, full path of file where to save data, ``.nc`` appended if not given params : str or sequence of str keynames of parameter[s] to download - startdate : datetime.datetime or datetime.date + start : datetime.datetime or datetime.date UTC date - stopdate : datetime.datetime or datetime.date + end : datetime.datetime or datetime.date UTC date lookup_params : bool, default True optional flag, if ``False``, then codes are already formatted @@ -137,7 +137,7 @@ def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, :func:`pvlib.iotools.get_ecmwf_macc`. 
:: - target(server, startdate, stopdate, params, filename) -> None + target(server, startdate, enddate, params, filename) -> None Examples -------- @@ -161,12 +161,12 @@ def get_ecmwf_macc(filename, params, startdate, stopdate, lookup_params=True, params = '/'.join(PARAMS.get(p) for p in params) except TypeError: params = PARAMS.get(params) - startdate = startdate.strftime('%Y-%m-%d') - stopdate = stopdate.strftime('%Y-%m-%d') + startdate = start.strftime('%Y-%m-%d') + enddate = end.strftime('%Y-%m-%d') if not server: server = ECMWFDataServer() t = threading.Thread(target=target, daemon=True, - args=(server, startdate, stopdate, params, filename)) + args=(server, startdate, enddate, params, filename)) t.start() return t @@ -191,8 +191,8 @@ def __init__(self, filename): # time resolution in hours self.time_size = self.data.dimensions['time'].size self.start_time = self.data['time'][0] - self.stop_time = self.data['time'][-1] - self.time_range = self.stop_time - self.start_time + self.end_time = self.data['time'][-1] + self.time_range = self.end_time - self.start_time self.delta_time = self.time_range / (self.time_size - 1) def get_nearest_indices(self, latitude, longitude): @@ -281,7 +281,7 @@ def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None): longitude : float longitude in degrees utc_time_range : sequence of datetime.datetime - pair of start and stop naive or UTC date-times + pair of start and end naive or UTC date-times Returns ------- @@ -295,9 +295,9 @@ def read_ecmwf_macc(filename, latitude, longitude, utc_time_range=None): if utc_time_range: start_idx = netCDF4.date2index( utc_time_range[0], nctime, select='before') - stop_idx = netCDF4.date2index( + end_idx = netCDF4.date2index( utc_time_range[-1], nctime, select='after') - time_slice = slice(start_idx, stop_idx + 1) + time_slice = slice(start_idx, end_idx + 1) else: time_slice = slice(0, ecmwf_macc.time_size) times = netCDF4.num2date(nctime[time_slice], nctime.units) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index 758884160a..7276de908d 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -61,7 +61,7 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, Returns ------- - headers : dict + metadata : dict metadata from NREL PSM3 about the record, see :func:`pvlib.iotools.parse_psm3` for fields data : pandas.DataFrame @@ -177,14 +177,14 @@ def parse_psm3(fbuf): Returns ------- - headers : dict + metadata : dict metadata from NREL PSM3 about the record, see notes for fields data : pandas.DataFrame timeseries data from NREL PSM3 Notes ----- - The return is a tuple with two items. The first item is a header with + The return is a tuple with two items. 
The first item is a dictionary with metadata from NREL PSM3 about the record containing the following fields: * Source @@ -254,17 +254,17 @@ def parse_psm3(fbuf): `_ """ # The first 2 lines of the response are headers with metadata - header_fields = fbuf.readline().split(',') - header_fields[-1] = header_fields[-1].strip() # strip trailing newline - header_values = fbuf.readline().split(',') - header_values[-1] = header_values[-1].strip() # strip trailing newline - header = dict(zip(header_fields, header_values)) - # the response is all strings, so set some header types to numbers - header['Local Time Zone'] = int(header['Local Time Zone']) - header['Time Zone'] = int(header['Time Zone']) - header['Latitude'] = float(header['Latitude']) - header['Longitude'] = float(header['Longitude']) - header['Elevation'] = int(header['Elevation']) + metadata_fields = fbuf.readline().split(',') + metadata_fields[-1] = metadata_fields[-1].strip() # strip trailing newline + metadata_values = fbuf.readline().split(',') + metadata_values[-1] = metadata_values[-1].strip() # strip trailing newline + metadata = dict(zip(metadata_fields, metadata_values)) + # the response is all strings, so set some metadata types to numbers + metadata['Local Time Zone'] = int(metadata['Local Time Zone']) + metadata['Time Zone'] = int(metadata['Time Zone']) + metadata['Latitude'] = float(metadata['Latitude']) + metadata['Longitude'] = float(metadata['Longitude']) + metadata['Elevation'] = int(metadata['Elevation']) # get the column names so we can set the dtypes columns = fbuf.readline().split(',') columns[-1] = columns[-1].strip() # strip trailing newline @@ -282,10 +282,10 @@ def parse_psm3(fbuf): dtidx = pd.to_datetime( data[['Year', 'Month', 'Day', 'Hour', 'Minute']]) # in USA all timezones are integers - tz = 'Etc/GMT%+d' % -header['Time Zone'] + tz = 'Etc/GMT%+d' % -metadata['Time Zone'] data.index = pd.DatetimeIndex(dtidx).tz_localize(tz) - return header, data + return metadata, data def read_psm3(filename): @@ -300,7 +300,7 @@ def read_psm3(filename): Returns ------- - headers : dict + metadata : dict metadata from NREL PSM3 about the record, see :func:`pvlib.iotools.parse_psm3` for fields data : pandas.DataFrame diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index d43d4db87e..c4495b435a 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -363,7 +363,7 @@ def read_pvgis_hourly(filename, pvgis_format=None, map_variables=True): raise ValueError(err_msg) -def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, +def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, userhorizon=None, startyear=None, endyear=None, url=URL, timeout=30): """ @@ -372,9 +372,9 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, Parameters ---------- - lat : float + latitude : float Latitude in degrees north - lon : float + longitude : float Longitude in dgrees east outputformat : str, default 'json' Must be in ``['csv', 'basic', 'epw', 'json']``. 
See PVGIS TMY tool @@ -403,8 +403,8 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, TMY year for each month, ``None`` for basic and EPW inputs : dict the inputs, ``None`` for basic and EPW - meta : list or dict - meta data, ``None`` for basic + metadata : list or dict + file metadata, ``None`` for basic Raises ------ @@ -426,7 +426,7 @@ def get_pvgis_tmy(lat, lon, outputformat='json', usehorizon=True, `_ """ # use requests to format the query string by passing params dictionary - params = {'lat': lat, 'lon': lon, 'outputformat': outputformat} + params = {'lat': latitude, 'lon': longitude, 'outputformat': outputformat} # pvgis only likes 0 for False, and 1 for True, not strings, also the # default for usehorizon is already 1 (ie: True), so only set if False if not usehorizon: @@ -546,8 +546,8 @@ def read_pvgis_tmy(filename, pvgis_format=None): TMY year for each month, ``None`` for basic and EPW inputs : dict the inputs, ``None`` for basic and EPW - meta : list or dict - meta data, ``None`` for basic + metadata : list or dict + file metadata, ``None`` for basic Raises ------ diff --git a/pvlib/iotools/sodapro.py b/pvlib/iotools/sodapro.py index a27e6f1423..68fa82a396 100644 --- a/pvlib/iotools/sodapro.py +++ b/pvlib/iotools/sodapro.py @@ -41,7 +41,7 @@ '0 year 1 month 0 day 0 h 0 min 0 s': '1M'} -def get_cams(start, end, latitude, longitude, email, identifier='mcclear', +def get_cams(latitude, longitude, start, end, email, identifier='mcclear', altitude=None, time_step='1h', time_ref='UT', verbose=False, integrated=False, label=None, map_variables=True, server='www.soda-is.com', timeout=30): @@ -62,19 +62,19 @@ def get_cams(start, end, latitude, longitude, email, identifier='mcclear', Parameters ---------- - start: datetime like - First day of the requested period - end: datetime like - Last day of the requested period latitude: float in decimal degrees, between -90 and 90, north is positive (ISO 19115) longitude : float in decimal degrees, between -180 and 180, east is positive (ISO 19115) + start: datetime like + First day of the requested period + end: datetime like + Last day of the requested period email: str Email address linked to a SoDa account identifier: {'mcclear', 'cams_radiation'} Specify whether to retrieve CAMS Radiation or McClear parameters - altitude: float, default: None + altitude: float, optional Altitude in meters. If None, then the altitude is determined from the NASA SRTM database time_step: str, {'1min', '15min', '1h', '1d', '1M'}, default: '1h' @@ -96,7 +96,7 @@ def get_cams(start, end, latitude, longitude, email, identifier='mcclear', where applicable. See variable CAMS_VARIABLE_MAP. 
server: str, default: 'www.soda-is.com' Main server (www.soda-is.com) or backup mirror server (pro.soda-is.com) - timeout : int, default 30 + timeout : int, default: 30 Time in seconds to wait for server response before timeout Returns From 95efd76903d5140d95ea7cdf66c76890cde97899 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 30 Jul 2021 20:08:03 +0200 Subject: [PATCH 04/35] Have station arg precede start/end in get_bsrn --- pvlib/iotools/bsrn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pvlib/iotools/bsrn.py b/pvlib/iotools/bsrn.py index c9c3abb93d..6dd85f8e26 100644 --- a/pvlib/iotools/bsrn.py +++ b/pvlib/iotools/bsrn.py @@ -60,7 +60,7 @@ def _empty_dataframe_from_logical_records(logical_records): return pd.DataFrame(columns=columns) -def get_bsrn(start, end, station, username, password, +def get_bsrn(station, start, end, username, password, logical_records=('0100',), local_path=None): """ Retrieve ground measured irradiance data from the BSRN FTP server. From f4c2576fb19886fcf1c68e1e6d75b5ab801e0c41 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 30 Jul 2021 20:09:01 +0200 Subject: [PATCH 05/35] Update get_bsrn doc string --- pvlib/iotools/bsrn.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pvlib/iotools/bsrn.py b/pvlib/iotools/bsrn.py index 6dd85f8e26..57e8c05e8e 100644 --- a/pvlib/iotools/bsrn.py +++ b/pvlib/iotools/bsrn.py @@ -73,12 +73,12 @@ def get_bsrn(station, start, end, username, password, Parameters ---------- + station: str + 3-letter BSRN station abbreviation start: datetime-like First day of the requested period end: datetime-like Last day of the requested period - station: str - 3-letter BSRN station abbreviation username: str username for accessing the BSRN FTP server password: str From 082c3f8d022884656b65f0caad0a281c62286717 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 30 Jul 2021 21:04:47 +0200 Subject: [PATCH 06/35] Change output order in psm3 --- pvlib/iotools/psm3.py | 16 +++++++------- pvlib/tests/iotools/test_psm3.py | 38 ++++++++++++++++---------------- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index 7276de908d..561d1ea3ce 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -61,11 +61,11 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, Returns ------- + data : pandas.DataFrame + timeseries data from NREL PSM3 metadata : dict metadata from NREL PSM3 about the record, see :func:`pvlib.iotools.parse_psm3` for fields - data : pandas.DataFrame - timeseries data from NREL PSM3 Raises ------ @@ -177,10 +177,10 @@ def parse_psm3(fbuf): Returns ------- - metadata : dict - metadata from NREL PSM3 about the record, see notes for fields data : pandas.DataFrame timeseries data from NREL PSM3 + metadata : dict + metadata from NREL PSM3 about the record, see notes for fields Notes ----- @@ -240,7 +240,7 @@ def parse_psm3(fbuf): -------- >>> # Read a local PSM3 file: >>> with open(filename, 'r') as f: # doctest: +SKIP - ... metadata, df = iotools.parse_psm3(f) # doctest: +SKIP + ... 
df, metadata = iotools.parse_psm3(f) # doctest: +SKIP See Also -------- @@ -285,7 +285,7 @@ def parse_psm3(fbuf): tz = 'Etc/GMT%+d' % -metadata['Time Zone'] data.index = pd.DatetimeIndex(dtidx).tz_localize(tz) - return metadata, data + return data, metadata def read_psm3(filename): @@ -300,11 +300,11 @@ def read_psm3(filename): Returns ------- + data : pandas.DataFrame + timeseries data from NREL PSM3 metadata : dict metadata from NREL PSM3 about the record, see :func:`pvlib.iotools.parse_psm3` for fields - data : pandas.DataFrame - timeseries data from NREL PSM3 See Also -------- diff --git a/pvlib/tests/iotools/test_psm3.py b/pvlib/tests/iotools/test_psm3.py index ca3a5e3034..0b02c3c291 100644 --- a/pvlib/tests/iotools/test_psm3.py +++ b/pvlib/tests/iotools/test_psm3.py @@ -17,7 +17,7 @@ YEAR_TEST_DATA_5MIN = DATA_DIR / 'test_psm3_2019_5min.csv' MANUAL_TEST_DATA = DATA_DIR / 'test_read_psm3.csv' LATITUDE, LONGITUDE = 40.5137, -108.5449 -HEADER_FIELDS = [ +METADATA_FIELDS = [ 'Source', 'Location ID', 'City', 'State', 'Country', 'Latitude', 'Longitude', 'Time Zone', 'Elevation', 'Local Time Zone', 'Dew Point Units', 'DHI Units', 'DNI Units', 'GHI Units', @@ -46,7 +46,7 @@ def nrel_api_key(): return demo_key -def assert_psm3_equal(header, data, expected): +def assert_psm3_equal(data, metadata, expected): """check consistency of PSM3 data""" # check datevec columns assert np.allclose(data.Year, expected.Year) @@ -65,48 +65,48 @@ def assert_psm3_equal(header, data, expected): assert np.allclose(data['Wind Speed'], expected['Wind Speed']) assert np.allclose(data['Wind Direction'], expected['Wind Direction']) # check header - for hf in HEADER_FIELDS: - assert hf in header + for mf in METADATA_FIELDS: + assert mf in metadata # check timezone - assert (data.index.tzinfo.zone == 'Etc/GMT%+d' % -header['Time Zone']) + assert (data.index.tzinfo.zone == 'Etc/GMT%+d' % -metadata['Time Zone']) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_tmy(nrel_api_key): """test get_psm3 with a TMY""" - header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='tmy-2017') + data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + PVLIB_EMAIL, names='tmy-2017') expected = pd.read_csv(TMY_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_singleyear(nrel_api_key): """test get_psm3 with a single year""" - header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2017', interval=30) + data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + PVLIB_EMAIL, names='2017', interval=30) expected = pd.read_csv(YEAR_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_5min(nrel_api_key): """test get_psm3 for 5-minute data""" - header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, - PVLIB_EMAIL, names='2019', interval=5) + data, metadata = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + PVLIB_EMAIL, names='2019', interval=5) assert len(data) == 525600/5 first_day = data.loc['2019-01-01'] expected = pd.read_csv(YEAR_TEST_DATA_5MIN) - assert_psm3_equal(header, first_day, expected) + assert_psm3_equal(first_day, metadata, expected) @pytest.mark.remote_data 
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_psm3_check_leap_day(nrel_api_key): - _, data_2012 = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, + data_2012, _ = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key, PVLIB_EMAIL, names="2012", interval=60, leap_day=True) assert len(data_2012) == (8760 + 24) @@ -149,13 +149,13 @@ def io_input(request): def test_parse_psm3(io_input): """test parse_psm3""" - header, data = psm3.parse_psm3(io_input) + data, metadata = psm3.parse_psm3(io_input) expected = pd.read_csv(YEAR_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) def test_read_psm3(): """test read_psm3""" - header, data = psm3.read_psm3(MANUAL_TEST_DATA) + data, metadata = psm3.read_psm3(MANUAL_TEST_DATA) expected = pd.read_csv(YEAR_TEST_DATA) - assert_psm3_equal(header, data, expected) + assert_psm3_equal(data, metadata, expected) From b8bdd62b6bc9d8939cf2f2f18df54a3a9a776c22 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 2 Aug 2021 16:40:07 +0200 Subject: [PATCH 07/35] Add variable map to pvgis_tmy with depreciating warning --- pvlib/iotools/pvgis.py | 60 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 56 insertions(+), 4 deletions(-) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index c4495b435a..0774680467 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -20,6 +20,8 @@ import requests import pandas as pd from pvlib.iotools import read_epw, parse_epw +import warnings +from pvlib._deprecation import pvlibDeprecationWarning URL = 'https://re.jrc.ec.europa.eu/api/' @@ -365,7 +367,7 @@ def read_pvgis_hourly(filename, pvgis_format=None, map_variables=True): def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, userhorizon=None, startyear=None, endyear=None, url=URL, - timeout=30): + map_variables=None, timeout=30): """ Get TMY data from PVGIS. For more information see the PVGIS [1]_ TMY tool documentation [2]_. @@ -392,6 +394,9 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, last year to calculate TMY, must be at least 10 years from first year url : str, default :const:`pvlib.iotools.pvgis.URL` base url of PVGIS API, append ``tmy`` to get TMY endpoint + map_variables: bool, default: True + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable PVGIS_VARIABLE_MAP. timeout : int, default 30 time in seconds to wait for server response before timeout @@ -452,7 +457,7 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, data = None, None, None, None if outputformat == 'json': src = res.json() - return _parse_pvgis_tmy_json(src) + data = _parse_pvgis_tmy_json(src) elif outputformat == 'csv': with io.BytesIO(res.content) as src: data = _parse_pvgis_tmy_csv(src) @@ -467,6 +472,18 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, # this line is never reached because if outputformat is not valid then # the response is HTTP/1.1 400 BAD REQUEST which is handled earlier pass + + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. 
Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) + return data @@ -521,7 +538,7 @@ def _parse_pvgis_tmy_basic(src): return data, None, None, None -def read_pvgis_tmy(filename, pvgis_format=None): +def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): """ Read a file downloaded from PVGIS. @@ -537,6 +554,10 @@ def read_pvgis_tmy(filename, pvgis_format=None): ``outputformat='basic'``, please set `pvgis_format` to ``'basic'``. If `filename` is a buffer, then `pvgis_format` is required and must be in ``['csv', 'epw', 'json', 'basic']``. + map_variables: bool, default: True + When true, renames columns of the Dataframe to pvlib variable names + where applicable. See variable PVGIS_VARIABLE_MAP. + Returns ------- @@ -580,6 +601,16 @@ def read_pvgis_tmy(filename, pvgis_format=None): data, meta = parse_epw(filename) except AttributeError: # str/path has no .read() attribute data, meta = read_epw(filename) + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) return data, None, None, meta # NOTE: json, csv, and basic output formats have parsers defined as private @@ -594,7 +625,18 @@ def read_pvgis_tmy(filename, pvgis_format=None): except AttributeError: # str/path has no .read() attribute with open(str(filename), 'r') as fbuf: src = json.load(fbuf) - return _parse_pvgis_tmy_json(src) + data = _parse_pvgis_tmy_json(src) + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) + return data # CSV or basic: use the correct parser from this module # eg: _parse_pvgis_tmy_csv() or _parse_pvgist_tmy_basic() @@ -608,6 +650,16 @@ def read_pvgis_tmy(filename, pvgis_format=None): except AttributeError: # str/path has no .read() attribute with open(str(filename), 'rb') as fbuf: pvgis_data = pvgis_parser(fbuf) + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. 
Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) return pvgis_data # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] From b659af2e9d9a4b2bc01b4115507b0528270c291a Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 2 Aug 2021 22:31:02 +0200 Subject: [PATCH 08/35] Add variable_map to pvgis_tmy --- pvlib/iotools/pvgis.py | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index 0774680467..04e50d7ad7 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -394,7 +394,7 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, last year to calculate TMY, must be at least 10 years from first year url : str, default :const:`pvlib.iotools.pvgis.URL` base url of PVGIS API, append ``tmy`` to get TMY endpoint - map_variables: bool, default: True + map_variables: bool When true, renames columns of the Dataframe to pvlib variable names where applicable. See variable PVGIS_VARIABLE_MAP. timeout : int, default 30 @@ -457,13 +457,13 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, data = None, None, None, None if outputformat == 'json': src = res.json() - data = _parse_pvgis_tmy_json(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_json(src) elif outputformat == 'csv': with io.BytesIO(res.content) as src: - data = _parse_pvgis_tmy_csv(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_csv(src) elif outputformat == 'basic': with io.BytesIO(res.content) as src: - data = _parse_pvgis_tmy_basic(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_basic(src) elif outputformat == 'epw': with io.StringIO(res.content.decode('utf-8')) as src: data, meta = parse_epw(src) @@ -475,16 +475,16 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, if map_variables is None: warnings.warn( - 'PVGIS variable names will be renamed to pvlib conventions by ' - 'default starting in pvlib 0.10.0. Specify map_variables=True ' - 'to enable that behavior now, or specify map_variables=False ' - 'to hide this warning.', pvlibDeprecationWarning + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning ) map_variables = False if map_variables: data = data.rename(columns=PVGIS_VARIABLE_MAP) - return data + return data, months_selected, inputs, meta def _parse_pvgis_tmy_json(src): @@ -554,7 +554,7 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): ``outputformat='basic'``, please set `pvgis_format` to ``'basic'``. If `filename` is a buffer, then `pvgis_format` is required and must be in ``['csv', 'epw', 'json', 'basic']``. - map_variables: bool, default: True + map_variables: bool When true, renames columns of the Dataframe to pvlib variable names where applicable. See variable PVGIS_VARIABLE_MAP. 
@@ -625,7 +625,7 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): except AttributeError: # str/path has no .read() attribute with open(str(filename), 'r') as fbuf: src = json.load(fbuf) - data = _parse_pvgis_tmy_json(src) + data, months_selected, inputs, meta = _parse_pvgis_tmy_json(src) if map_variables is None: warnings.warn( 'PVGIS variable names will be renamed to pvlib conventions by ' @@ -636,7 +636,7 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): map_variables = False if map_variables: data = data.rename(columns=PVGIS_VARIABLE_MAP) - return data + return data, months_selected, inputs, meta # CSV or basic: use the correct parser from this module # eg: _parse_pvgis_tmy_csv() or _parse_pvgist_tmy_basic() @@ -646,10 +646,10 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): # NOTE: pvgis_parse() is a pvgis parser function from this module, # either _parse_pvgis_tmy_csv() or _parse_pvgist_tmy_basic() try: - pvgis_data = pvgis_parser(filename) + data, months_selected, inputs, meta = pvgis_parser(filename) except AttributeError: # str/path has no .read() attribute with open(str(filename), 'rb') as fbuf: - pvgis_data = pvgis_parser(fbuf) + data, months_selected, inputs, meta = pvgis_parser(fbuf) if map_variables is None: warnings.warn( 'PVGIS variable names will be renamed to pvlib conventions by ' @@ -659,8 +659,9 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): ) map_variables = False if map_variables: - data = data.rename(columns=PVGIS_VARIABLE_MAP) - return pvgis_data + data, months_selected, inputs, meta = \ + data.rename(columns=PVGIS_VARIABLE_MAP) + return data, months_selected, inputs, meta # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] err_msg = ( From 146a8b5366937151b8d05bbbced799f2583a39c3 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 2 Aug 2021 22:31:22 +0200 Subject: [PATCH 09/35] Coverage for variable_map for read_pvigs_tmy --- pvlib/tests/iotools/test_pvgis.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index fc0638ed74..9bb8b5a951 100644 --- a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -356,6 +356,12 @@ def csv_meta(meta_expected): in meta_expected['outputs']['tmy_hourly']['variables'].items()] +@pytest.fixture +def pvgis_tmy_mapped_columns(): + return ['temp_air', 'relative_humidity', 'ghi', 'dni', 'dhi', 'IR(h)', + 'wind_speed', 'wind_direction', 'pressure'] + + @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy(expected, month_year_expected, inputs_expected, @@ -481,6 +487,13 @@ def test_get_pvgis_tmy_error(): get_pvgis_tmy(45, 8, url='https://re.jrc.ec.europa.eu/') +def test_read_pvgis_tmy_map_variables(pvgis_tmy_mapped_columns): + fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' + actual, _, _, _ = read_pvgis_tmy(fn, map_variables=True) + assert all([a == e for a, e in + zip(actual.columns, pvgis_tmy_mapped_columns)]) + + def test_read_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' From 2ac6185f338be7df71480ba421da1256b6d7ff26 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 2 Aug 2021 22:47:27 +0200 Subject: [PATCH 10/35] Add "versionchanged" to psm3 docs --- pvlib/iotools/psm3.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py 
index 561d1ea3ce..13853ee2fe 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -177,6 +177,7 @@ def parse_psm3(fbuf): Returns ------- + .. versionchanged:: 0.9.0 data : pandas.DataFrame timeseries data from NREL PSM3 metadata : dict From 5303e08721ab53e1509df535ac6458a2b9c04e4f Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 2 Aug 2021 23:05:50 +0200 Subject: [PATCH 11/35] Update psm3 docs --- pvlib/iotools/psm3.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index 13853ee2fe..f635c773a6 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -175,9 +175,13 @@ def parse_psm3(fbuf): fbuf: file-like object File-like object containing data to read. + .. versionchanged:: 0.9.0 + In version v0.9 and forward the output is data, metadata in order to + match the general iotools pattern. The order of the output were switched + in previous versions of the psm3 functions. + Returns ------- - .. versionchanged:: 0.9.0 data : pandas.DataFrame timeseries data from NREL PSM3 metadata : dict @@ -185,8 +189,11 @@ def parse_psm3(fbuf): Notes ----- - The return is a tuple with two items. The first item is a dictionary with - metadata from NREL PSM3 about the record containing the following fields: + The return is a tuple with two items. The first item is a dataframe with + the PSM3 timeseries data. + + The second item is a dictionary with metadata from NREL PSM3 about the + record containing the following fields: * Source * Location ID @@ -235,8 +242,6 @@ def parse_psm3(fbuf): * Surface Albedo Units * Version - The second item is a dataframe with the PSM3 timeseries data. - Examples -------- >>> # Read a local PSM3 file: From 941eb30ba682eb0bd62d2ca872049f329cb31cf1 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 2 Aug 2021 23:11:32 +0200 Subject: [PATCH 12/35] Update versionchanged --- pvlib/iotools/psm3.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index f635c773a6..90a4baa6d3 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -175,11 +175,6 @@ def parse_psm3(fbuf): fbuf: file-like object File-like object containing data to read. - .. versionchanged:: 0.9.0 - In version v0.9 and forward the output is data, metadata in order to - match the general iotools pattern. The order of the output were switched - in previous versions of the psm3 functions. - Returns ------- data : pandas.DataFrame @@ -258,6 +253,11 @@ def parse_psm3(fbuf): `_ .. [2] `Standard Time Series Data File Format `_ + + .. versionchanged:: 0.9.0 + In version v0.9 and forward the output is data, metadata in order to + match the general iotools pattern. The order of the output were switched + in previous versions of the psm3 functions. """ # The first 2 lines of the response are headers with metadata metadata_fields = fbuf.readline().split(',') From ab4ac2357cf63df1702a7d642845fabc1ac6785e Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Tue, 3 Aug 2021 12:13:06 +0200 Subject: [PATCH 13/35] Update versionchange message --- pvlib/iotools/psm3.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index 90a4baa6d3..e756c822a3 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -117,6 +117,11 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, `_ .. 
[4] `Physical Solar Model (PSM) v3 - Five Minute Temporal Resolution `_ + + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. """ # The well know text (WKT) representation of geometry notation is strict. # A POINT object is a string with longitude first, then the latitude, with @@ -255,9 +260,9 @@ def parse_psm3(fbuf): `_ .. versionchanged:: 0.9.0 - In version v0.9 and forward the output is data, metadata in order to - match the general iotools pattern. The order of the output were switched - in previous versions of the psm3 functions. + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. """ # The first 2 lines of the response are headers with metadata metadata_fields = fbuf.readline().split(',') @@ -322,6 +327,11 @@ def read_psm3(filename): `_ .. [2] `Standard Time Series Data File Format `_ + + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. """ with open(str(filename), 'r') as fbuf: content = parse_psm3(fbuf) From b4a52280f49b196f84fba79d01055d0569aa8b2c Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Wed, 4 Aug 2021 18:09:13 +0200 Subject: [PATCH 14/35] Correct ouput for get_pvgis_tmy with epw format --- pvlib/iotools/pvgis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index 04e50d7ad7..bd1741c362 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -467,7 +467,7 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, elif outputformat == 'epw': with io.StringIO(res.content.decode('utf-8')) as src: data, meta = parse_epw(src) - data = (data, None, None, meta) + months_selected, inputs = None, None else: # this line is never reached because if outputformat is not valid then # the response is HTTP/1.1 400 BAD REQUEST which is handled earlier From dbee07e2c0f3147865be897663613a14cbc687bc Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Wed, 4 Aug 2021 19:24:39 +0200 Subject: [PATCH 15/35] Update pvgis_tmy map test --- pvlib/tests/iotools/test_pvgis.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index 9bb8b5a951..2f0022fd13 100644 --- a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -490,8 +490,7 @@ def test_get_pvgis_tmy_error(): def test_read_pvgis_tmy_map_variables(pvgis_tmy_mapped_columns): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' actual, _, _, _ = read_pvgis_tmy(fn, map_variables=True) - assert all([a == e for a, e in - zip(actual.columns, pvgis_tmy_mapped_columns)]) + assert all([c in pvgis_tmy_mapped_columns for c in actual.columns]) def test_read_pvgis_tmy_json(expected, month_year_expected, inputs_expected, From 42db48858d0e0875997dc229ba5bc45a015a0837 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Wed, 4 Aug 2021 22:56:49 +0200 Subject: [PATCH 16/35] Update v0.9.0.rst --- docs/sphinx/source/whatsnew/v0.9.0.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst 
b/docs/sphinx/source/whatsnew/v0.9.0.rst index fa7ed689b5..6f41d25117 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -49,6 +49,17 @@ Breaking changes :py:meth:`~pvlib.pvsystem.PVSystem.calcparams_desoto` and :py:meth:`~pvlib.pvsystem.PVSystem.calcparams_cec` (:issue:`1118`, :pull:`1222`) +* Switched the order of the outputs from the PSM3 iotools, notably + :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` + (:issues:`1245`, :pull`1268`) + +* Changed the naming of the inputs `stardate`/`enddate` to `start`/end` in + :py:func:`~pvlib.iotools.get_ecmwf_macc` + (:issues:`1245`, :pull`1268`) + +* Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in + :py:func:`~pvlib.iotools.get_pvgis_tmy` and :py:func:`~pvlib.iotools.read_pvgis_tmy` + (:issues:`1245`, :pull`1268`) Deprecations ~~~~~~~~~~~~ @@ -111,6 +122,8 @@ Enhancements :func:`~pvlib.iotools.get_pvgis_hourly` for reading and retrieving hourly solar radiation data and PV power output from PVGIS. (:pull:`1186`, :issue:`849`) +* Added `map_variables` option to :func:`~pvlib.iotools.get_pvgis_tmy` and + :func:`~pvlib.iotools.read_pvgis_tmy` (:issues:`1245`, :pull`1268`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` for retrieving and reading BSRN solar radiation data files. (:pull:`1254`, :pull:`1145`, :issue:`1015`) From 58242b5606cb6da13deb557f8d6e7befbc8ed15e Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Thu, 5 Aug 2021 13:20:26 +0200 Subject: [PATCH 17/35] Implement comments from review by kanderso-nrel --- docs/sphinx/source/introtutorial.rst | 11 +----- docs/sphinx/source/whatsnew/v0.9.0.rst | 4 +- pvlib/iotools/psm3.py | 30 +++++++-------- pvlib/iotools/pvgis.py | 36 +++++------------- pvlib/tests/iotools/test_pvgis.py | 51 +++++++++++++++----------- 5 files changed, 58 insertions(+), 74 deletions(-) diff --git a/docs/sphinx/source/introtutorial.rst b/docs/sphinx/source/introtutorial.rst index b3a9b9a7b6..3e62736689 100644 --- a/docs/sphinx/source/introtutorial.rst +++ b/docs/sphinx/source/introtutorial.rst @@ -58,18 +58,11 @@ includes irradiation, temperature and wind speed. .. ipython:: python - variables_translation = { - "Gb(n)": "dni", - "G(h)": "ghi", - "Gd(h)": "dhi", - "T2m": "temp_air", - "WS10m": "wind_speed", - } tmys = [] for location in coordinates: latitude, longitude, name, altitude, timezone = location - weather = pvlib.iotools.get_pvgis_tmy(latitude, longitude)[0] - weather = weather.rename(columns=variables_translation) + weather = pvlib.iotools.get_pvgis_tmy(latitude, longitude, + map_variables=True)[0] weather.index.name = "utc_time" tmys.append(weather) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 6f41d25117..139c9f31a9 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -53,7 +53,7 @@ Breaking changes :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` (:issues:`1245`, :pull`1268`) -* Changed the naming of the inputs `stardate`/`enddate` to `start`/end` in +* Changed the naming of the inputs `startdate`/`enddate` to `start`/end` in :py:func:`~pvlib.iotools.get_ecmwf_macc` (:issues:`1245`, :pull`1268`) @@ -123,7 +123,7 @@ Enhancements solar radiation data and PV power output from PVGIS. 
(:pull:`1186`, :issue:`849`) * Added `map_variables` option to :func:`~pvlib.iotools.get_pvgis_tmy` and - :func:`~pvlib.iotools.read_pvgis_tmy` (:issues:`1245`, :pull`1268`) + :func:`~pvlib.iotools.read_pvgis_tmy` (:issue:`1250`, :pull:`1268`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` for retrieving and reading BSRN solar radiation data files. (:pull:`1254`, :pull:`1145`, :issue:`1015`) diff --git a/pvlib/iotools/psm3.py b/pvlib/iotools/psm3.py index e756c822a3..44fba674b1 100644 --- a/pvlib/iotools/psm3.py +++ b/pvlib/iotools/psm3.py @@ -28,6 +28,11 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, Retrieve NSRDB PSM3 timeseries weather data from the PSM3 API. The NSRDB is described in [1]_ and the PSM3 API is described in [2]_, [3]_, and [4]_. + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. + Parameters ---------- latitude : float or int @@ -117,11 +122,6 @@ def get_psm3(latitude, longitude, api_key, email, names='tmy', interval=60, `_ .. [4] `Physical Solar Model (PSM) v3 - Five Minute Temporal Resolution `_ - - .. versionchanged:: 0.9.0 - The function now returns a tuple where the first element is a dataframe - and the second element is a dictionary containing metadata. Previous - versions of this function had the return values switched. """ # The well know text (WKT) representation of geometry notation is strict. # A POINT object is a string with longitude first, then the latitude, with @@ -175,6 +175,11 @@ def parse_psm3(fbuf): Parse an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. + Parameters ---------- fbuf: file-like object @@ -258,11 +263,6 @@ def parse_psm3(fbuf): `_ .. [2] `Standard Time Series Data File Format `_ - - .. versionchanged:: 0.9.0 - The function now returns a tuple where the first element is a dataframe - and the second element is a dictionary containing metadata. Previous - versions of this function had the return values switched. """ # The first 2 lines of the response are headers with metadata metadata_fields = fbuf.readline().split(',') @@ -304,6 +304,11 @@ def read_psm3(filename): Read an NSRDB PSM3 weather file (formatted as SAM CSV). The NSRDB is described in [1]_ and the SAM CSV format is described in [2]_. + .. versionchanged:: 0.9.0 + The function now returns a tuple where the first element is a dataframe + and the second element is a dictionary containing metadata. Previous + versions of this function had the return values switched. + Parameters ---------- filename: str @@ -327,11 +332,6 @@ def read_psm3(filename): `_ .. [2] `Standard Time Series Data File Format `_ - - .. versionchanged:: 0.9.0 - The function now returns a tuple where the first element is a dataframe - and the second element is a dictionary containing metadata. Previous - versions of this function had the return values switched. 
""" with open(str(filename), 'r') as fbuf: content = parse_psm3(fbuf) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index bd1741c362..45fbc0aa96 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -592,6 +592,15 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): else: outputformat = pvgis_format + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + # parse the pvgis file based on the output format, either 'epw', 'json', # 'csv', or 'basic' @@ -601,14 +610,6 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): data, meta = parse_epw(filename) except AttributeError: # str/path has no .read() attribute data, meta = read_epw(filename) - if map_variables is None: - warnings.warn( - 'PVGIS variable names will be renamed to pvlib conventions by ' - 'default starting in pvlib 0.10.0. Specify map_variables=True ' - 'to enable that behavior now, or specify map_variables=False ' - 'to hide this warning.', pvlibDeprecationWarning - ) - map_variables = False if map_variables: data = data.rename(columns=PVGIS_VARIABLE_MAP) return data, None, None, meta @@ -626,14 +627,6 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): with open(str(filename), 'r') as fbuf: src = json.load(fbuf) data, months_selected, inputs, meta = _parse_pvgis_tmy_json(src) - if map_variables is None: - warnings.warn( - 'PVGIS variable names will be renamed to pvlib conventions by ' - 'default starting in pvlib 0.10.0. Specify map_variables=True ' - 'to enable that behavior now, or specify map_variables=False ' - 'to hide this warning.', pvlibDeprecationWarning - ) - map_variables = False if map_variables: data = data.rename(columns=PVGIS_VARIABLE_MAP) return data, months_selected, inputs, meta @@ -650,17 +643,8 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): except AttributeError: # str/path has no .read() attribute with open(str(filename), 'rb') as fbuf: data, months_selected, inputs, meta = pvgis_parser(fbuf) - if map_variables is None: - warnings.warn( - 'PVGIS variable names will be renamed to pvlib conventions by ' - 'default starting in pvlib 0.10.0. 
Specify map_variables=True ' - 'to enable that behavior now, or specify map_variables=False ' - 'to hide this warning.', pvlibDeprecationWarning - ) - map_variables = False if map_variables: - data, months_selected, inputs, meta = \ - data.rename(columns=PVGIS_VARIABLE_MAP) + data = data.rename(columns=PVGIS_VARIABLE_MAP) return data, months_selected, inputs, meta # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index 2f0022fd13..715806762a 100644 --- a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -366,7 +366,7 @@ def pvgis_tmy_mapped_columns(): @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy(expected, month_year_expected, inputs_expected, meta_expected): - pvgis_data = get_pvgis_tmy(45, 8) + pvgis_data = get_pvgis_tmy(45, 8, map_variables=False) _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) @@ -399,26 +399,28 @@ def _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_kwargs(userhorizon_expected): - _, _, inputs, _ = get_pvgis_tmy(45, 8, usehorizon=False) + _, _, inputs, _ = get_pvgis_tmy(45, 8, usehorizon=False, + map_variables=False) assert inputs['meteo_data']['use_horizon'] is False data, _, _, _ = get_pvgis_tmy( - 45, 8, userhorizon=[0, 10, 20, 30, 40, 15, 25, 5]) + 45, 8, userhorizon=[0, 10, 20, 30, 40, 15, 25, 5], map_variables=False) assert np.allclose( data['G(h)'], userhorizon_expected['G(h)'].values) assert np.allclose( data['Gb(n)'], userhorizon_expected['Gb(n)'].values) assert np.allclose( data['Gd(h)'], userhorizon_expected['Gd(h)'].values) - _, _, inputs, _ = get_pvgis_tmy(45, 8, startyear=2005) + _, _, inputs, _ = get_pvgis_tmy(45, 8, startyear=2005, map_variables=False) assert inputs['meteo_data']['year_min'] == 2005 - _, _, inputs, _ = get_pvgis_tmy(45, 8, endyear=2016) + _, _, inputs, _ = get_pvgis_tmy(45, 8, endyear=2016, map_variables=False) assert inputs['meteo_data']['year_max'] == 2016 @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_basic(expected, meta_expected): - pvgis_data = get_pvgis_tmy(45, 8, outputformat='basic') + pvgis_data = get_pvgis_tmy(45, 8, outputformat='basic', + map_variables=False) _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data) @@ -433,7 +435,7 @@ def _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data): @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta): - pvgis_data = get_pvgis_tmy(45, 8, outputformat='csv') + pvgis_data = get_pvgis_tmy(45, 8, outputformat='csv', map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) @@ -464,7 +466,7 @@ def _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy_epw(expected, epw_meta): - pvgis_data = get_pvgis_tmy(45, 8, outputformat='epw') + pvgis_data = get_pvgis_tmy(45, 8, outputformat='epw', map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) @@ -497,15 +499,16 @@ def test_read_pvgis_tmy_json(expected, month_year_expected, inputs_expected, 
meta_expected): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' # infer outputformat from file extensions - pvgis_data = read_pvgis_tmy(fn) + pvgis_data = read_pvgis_tmy(fn, map_variables=False) _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='json') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='json', map_variables=False) _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) with fn.open('r') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='json') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='json', + map_variables=False) _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected, meta_expected, pvgis_data) @@ -513,13 +516,14 @@ def test_read_pvgis_tmy_json(expected, month_year_expected, inputs_expected, def test_read_pvgis_tmy_epw(expected, epw_meta): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.epw' # infer outputformat from file extensions - pvgis_data = read_pvgis_tmy(fn) + pvgis_data = read_pvgis_tmy(fn, map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='epw') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='epw', map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) with fn.open('r') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='epw') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='epw', + map_variables=False) _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data) @@ -527,15 +531,16 @@ def test_read_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.csv' # infer outputformat from file extensions - pvgis_data = read_pvgis_tmy(fn) + pvgis_data = read_pvgis_tmy(fn, map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='csv') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='csv', map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) with fn.open('rb') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='csv') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='csv', + map_variables=False) _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected, meta_expected, csv_meta, pvgis_data) @@ -544,20 +549,22 @@ def test_read_pvgis_tmy_basic(expected, meta_expected): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.txt' # XXX: can't infer outputformat from file extensions for basic with pytest.raises(ValueError, match="pvgis format 'txt' was unknown"): - read_pvgis_tmy(fn) + read_pvgis_tmy(fn, map_variables=False) # explicit pvgis outputformat - pvgis_data = read_pvgis_tmy(fn, pvgis_format='basic') + pvgis_data = read_pvgis_tmy(fn, pvgis_format='basic', map_variables=False) _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data) with fn.open('rb') as fbuf: - pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='basic') + pvgis_data = read_pvgis_tmy(fbuf, pvgis_format='basic', + map_variables=False) _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data) # file buffer raises TypeError if passed to pathlib.Path() with pytest.raises(TypeError): - read_pvgis_tmy(fbuf) + read_pvgis_tmy(fbuf, map_variables=False) def 
test_read_pvgis_tmy_exception(): bad_outputformat = 'bad' err_msg = f"pvgis format '{bad_outputformat:s}' was unknown" with pytest.raises(ValueError, match=err_msg): - read_pvgis_tmy('filename', pvgis_format=bad_outputformat) + read_pvgis_tmy('filename', pvgis_format=bad_outputformat, + map_variables=False) From 7f3b32c46428bd4582e3b704dea3ad050218b346 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Thu, 5 Aug 2021 13:35:45 +0200 Subject: [PATCH 18/35] Remove 'empty' columns when an empty dataframe is returned by bsrn --- pvlib/iotools/bsrn.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pvlib/iotools/bsrn.py b/pvlib/iotools/bsrn.py index 57e8c05e8e..4c02c10f90 100644 --- a/pvlib/iotools/bsrn.py +++ b/pvlib/iotools/bsrn.py @@ -57,6 +57,7 @@ def _empty_dataframe_from_logical_records(logical_records): columns = [] for lr in logical_records: columns += BSRN_COLUMNS[lr][2:] + columns = [c for c in columns if c != 'empty'] return pd.DataFrame(columns=columns) From d67a1e9a50ed81778606fcda8be54486fa8234d5 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Thu, 5 Aug 2021 13:36:15 +0200 Subject: [PATCH 19/35] Remove admonition about pvgis renaming in introtutorial.rst --- docs/sphinx/source/introtutorial.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/sphinx/source/introtutorial.rst b/docs/sphinx/source/introtutorial.rst index 3e62736689..fe79d396a4 100644 --- a/docs/sphinx/source/introtutorial.rst +++ b/docs/sphinx/source/introtutorial.rst @@ -52,10 +52,6 @@ the :ref:`iotools` module. In this example we will be using PVGIS, one of the data sources available, to retrieve a Typical Meteorological Year (TMY) which includes irradiation, temperature and wind speed. -.. note:: PVGIS uses different naming conventions, so it is required to rename - the weather data variables before using them. Data is already UTC-localized, - so conversion to local timezone is optional. - .. ipython:: python tmys = [] From d9bc9cb4df958f03c61fde0787cc4e90e7ee2d99 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Thu, 5 Aug 2021 13:39:57 +0200 Subject: [PATCH 20/35] Fix typo in pvigs_tmy documentation --- pvlib/iotools/pvgis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index 45fbc0aa96..209726d9e8 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -377,7 +377,7 @@ def get_pvgis_tmy(latitude, longitude, outputformat='json', usehorizon=True, latitude : float Latitude in degrees north longitude : float - Longitude in dgrees east + Longitude in degrees east outputformat : str, default 'json' Must be in ``['csv', 'basic', 'epw', 'json']``. See PVGIS TMY tool documentation [2]_ for more info. 
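A minimal usage sketch of the ``map_variables`` behaviour exercised in the patches above, under stated assumptions: the coordinates (45, 8) are arbitrary, and the renamed columns are those defined in ``PVGIS_VARIABLE_MAP`` (e.g. 'G(h)' becomes 'ghi' and 'Gb(n)' becomes 'dni'). This is an illustrative snippet, not a definitive example from the series.

    import pvlib

    # Opting in renames the PVGIS columns to pvlib conventions and avoids
    # the pvlibDeprecationWarning raised when map_variables is left as None.
    data, months_selected, inputs, metadata = pvlib.iotools.get_pvgis_tmy(
        45, 8, map_variables=True)

    # Opting out keeps the native PVGIS column names ('G(h)', 'Gb(n)', ...)
    # and likewise silences the warning.
    raw, _, _, _ = pvlib.iotools.get_pvgis_tmy(45, 8, map_variables=False)

Both calls return the four-element tuple ``(data, months_selected, inputs, metadata)``; for the 'epw' output format the middle two elements are ``None``.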
From 162cd2ff8df6569b522e4a79db59fbf7ae6bb346 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Thu, 5 Aug 2021 23:31:46 +0200 Subject: [PATCH 21/35] Fix references in whatsnew --- docs/sphinx/source/whatsnew/v0.9.0.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 139c9f31a9..fe5fde4646 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -59,7 +59,7 @@ Breaking changes * Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in :py:func:`~pvlib.iotools.get_pvgis_tmy` and :py:func:`~pvlib.iotools.read_pvgis_tmy` - (:issues:`1245`, :pull`1268`) + (:issue:`1245`, :pull:`1268`) Deprecations ~~~~~~~~~~~~ From ecc549a5897c59074d9e6b0f1de02f51b4db63e0 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 6 Aug 2021 10:09:45 +0200 Subject: [PATCH 22/35] Refactor pvigs_tmy --- pvlib/iotools/pvgis.py | 49 ++++++++++++++++++++---------------------- 1 file changed, 23 insertions(+), 26 deletions(-) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index 209726d9e8..186da05713 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -591,16 +591,6 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): outputformat = Path(filename).suffix[1:].lower() else: outputformat = pvgis_format - - if map_variables is None: - warnings.warn( - 'PVGIS variable names will be renamed to pvlib conventions by ' - 'default starting in pvlib 0.10.0. Specify map_variables=True ' - 'to enable that behavior now, or specify map_variables=False ' - 'to hide this warning.', pvlibDeprecationWarning - ) - map_variables = False - # parse the pvgis file based on the output format, either 'epw', 'json', # 'csv', or 'basic' @@ -610,9 +600,7 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): data, meta = parse_epw(filename) except AttributeError: # str/path has no .read() attribute data, meta = read_epw(filename) - if map_variables: - data = data.rename(columns=PVGIS_VARIABLE_MAP) - return data, None, None, meta + months_selected, inputs = None, None # NOTE: json, csv, and basic output formats have parsers defined as private # functions in this module @@ -620,20 +608,17 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): # JSON: use Python built-in json module to convert file contents to a # Python dictionary, and pass the dictionary to the _parse_pvgis_tmy_json() # function from this module - if outputformat == 'json': + elif outputformat == 'json': try: src = json.load(filename) except AttributeError: # str/path has no .read() attribute with open(str(filename), 'r') as fbuf: src = json.load(fbuf) data, months_selected, inputs, meta = _parse_pvgis_tmy_json(src) - if map_variables: - data = data.rename(columns=PVGIS_VARIABLE_MAP) - return data, months_selected, inputs, meta # CSV or basic: use the correct parser from this module # eg: _parse_pvgis_tmy_csv() or _parse_pvgist_tmy_basic() - if outputformat in ['csv', 'basic']: + elif outputformat in ['csv', 'basic']: # get the correct parser function for this output format from globals() pvgis_parser = globals()['_parse_pvgis_tmy_{:s}'.format(outputformat)] # NOTE: pvgis_parse() is a pvgis parser function from this module, @@ -643,12 +628,24 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): except AttributeError: # str/path has no .read() attribute with open(str(filename), 'rb') as fbuf: data, months_selected, inputs, 
meta = pvgis_parser(fbuf) - if map_variables: - data = data.rename(columns=PVGIS_VARIABLE_MAP) - return data, months_selected, inputs, meta - # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] - err_msg = ( - "pvgis format '{:s}' was unknown, must be either 'epw', 'json', 'csv'" - ", or 'basic'").format(outputformat) - raise ValueError(err_msg) + else: + # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] + err_msg = ( + "pvgis format '{:s}' was unknown, must be either 'epw', 'json', 'csv'" + ", or 'basic'").format(outputformat) + raise ValueError(err_msg) + + if map_variables is None: + warnings.warn( + 'PVGIS variable names will be renamed to pvlib conventions by ' + 'default starting in pvlib 0.10.0. Specify map_variables=True ' + 'to enable that behavior now, or specify map_variables=False ' + 'to hide this warning.', pvlibDeprecationWarning + ) + map_variables = False + if map_variables: + data = data.rename(columns=PVGIS_VARIABLE_MAP) + + return data, months_selected, inputs, meta + From 9cb3602ebdef9c4a71646c59014971b2914a3ddf Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 6 Aug 2021 10:16:50 +0200 Subject: [PATCH 23/35] Fix stickler --- pvlib/iotools/pvgis.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pvlib/iotools/pvgis.py b/pvlib/iotools/pvgis.py index 186da05713..3bb2f977c6 100644 --- a/pvlib/iotools/pvgis.py +++ b/pvlib/iotools/pvgis.py @@ -630,10 +630,10 @@ def read_pvgis_tmy(filename, pvgis_format=None, map_variables=None): data, months_selected, inputs, meta = pvgis_parser(fbuf) else: - # raise exception if pvgis format isn't in ['csv', 'basic', 'epw', 'json'] + # raise exception if pvgis format isn't in ['csv','basic','epw','json'] err_msg = ( - "pvgis format '{:s}' was unknown, must be either 'epw', 'json', 'csv'" - ", or 'basic'").format(outputformat) + "pvgis format '{:s}' was unknown, must be either 'epw', 'json', " + "'csv', or 'basic'").format(outputformat) raise ValueError(err_msg) if map_variables is None: From 0b6ccb5facd6ba718627c733cec09fc971b63922 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 6 Aug 2021 10:56:27 +0200 Subject: [PATCH 24/35] Fix issue references in whatsnew --- docs/sphinx/source/whatsnew/v0.9.0.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index fe5fde4646..466aec25ef 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -51,11 +51,11 @@ Breaking changes * Switched the order of the outputs from the PSM3 iotools, notably :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` - (:issues:`1245`, :pull`1268`) + (:issue:`1245`, :pull:`1268`) * Changed the naming of the inputs `startdate`/`enddate` to `start`/end` in :py:func:`~pvlib.iotools.get_ecmwf_macc` - (:issues:`1245`, :pull`1268`) + (:issue:`1245`, :pull:`1268`) * Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in :py:func:`~pvlib.iotools.get_pvgis_tmy` and :py:func:`~pvlib.iotools.read_pvgis_tmy` From aedc064ca9a18e4cd0a45066abed1faf20e681a0 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 6 Aug 2021 16:00:52 +0200 Subject: [PATCH 25/35] Coverage for get_pvgis_tmy map_variables --- pvlib/tests/iotools/test_pvgis.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index 715806762a..7d7876e370 100644 --- 
a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -489,6 +489,13 @@ def test_get_pvgis_tmy_error(): get_pvgis_tmy(45, 8, url='https://re.jrc.ec.europa.eu/') +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_get_pvgis_map_variables(pvgis_tmy_mapped_columns): + actual, _, _, _ = get_pvgis_tmy(45, 8, map_variables=True) + assert all([c in pvgis_tmy_mapped_columns for c in actual.columns]) + + def test_read_pvgis_tmy_map_variables(pvgis_tmy_mapped_columns): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.json' actual, _, _, _ = read_pvgis_tmy(fn, map_variables=True) From 7dd2c3c6ff76321b47415eff67401b94472b0748 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 6 Aug 2021 16:22:34 +0200 Subject: [PATCH 26/35] Fix errors in whatsnew --- docs/sphinx/source/whatsnew/v0.9.0.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 466aec25ef..fe827148e8 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -53,13 +53,12 @@ Breaking changes :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` (:issue:`1245`, :pull:`1268`) -* Changed the naming of the inputs `startdate`/`enddate` to `start`/end` in +* Changed the naming of the inputs `startdate`/`enddate` to `start`/`end` in :py:func:`~pvlib.iotools.get_ecmwf_macc` (:issue:`1245`, :pull:`1268`) * Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in - :py:func:`~pvlib.iotools.get_pvgis_tmy` and :py:func:`~pvlib.iotools.read_pvgis_tmy` - (:issue:`1245`, :pull:`1268`) + :py:func:`~pvlib.iotools.get_pvgis_tmy` (:issue:`1245`, :pull:`1268`) Deprecations ~~~~~~~~~~~~ From 6d1045c6529f823ae86b46d2b76eacb834909b83 Mon Sep 17 00:00:00 2001 From: "Adam R. Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Mon, 9 Aug 2021 19:58:46 +0200 Subject: [PATCH 27/35] Add double backticks in whatsnew Co-authored-by: Will Holmgren --- docs/sphinx/source/whatsnew/v0.9.0.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index fe827148e8..7cd34fde74 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -53,7 +53,7 @@ Breaking changes :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` (:issue:`1245`, :pull:`1268`) -* Changed the naming of the inputs `startdate`/`enddate` to `start`/`end` in +* Changed the naming of the inputs ``startdate``/``enddate`` to ``start``/``end`` in :py:func:`~pvlib.iotools.get_ecmwf_macc` (:issue:`1245`, :pull:`1268`) From 01f5d3cd7725b5a9fed4be7dca1ba471f2834f90 Mon Sep 17 00:00:00 2001 From: "Adam R. 
Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Mon, 9 Aug 2021 19:59:02 +0200 Subject: [PATCH 28/35] Add double backticks in whatsnew Co-authored-by: Will Holmgren --- docs/sphinx/source/whatsnew/v0.9.0.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 7cd34fde74..cd433ebc14 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -57,7 +57,7 @@ Breaking changes :py:func:`~pvlib.iotools.get_ecmwf_macc` (:issue:`1245`, :pull:`1268`) -* Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in +* Change the naming of the inputs ``lat``/``lon`` to ``latitude``/``longitude`` in :py:func:`~pvlib.iotools.get_pvgis_tmy` (:issue:`1245`, :pull:`1268`) Deprecations From 381870f93914f933596cb07bce000b92c4e263ca Mon Sep 17 00:00:00 2001 From: "Adam R. Jensen" <39184289+AdamRJensen@users.noreply.github.com> Date: Mon, 9 Aug 2021 19:59:21 +0200 Subject: [PATCH 29/35] Add double backticks in whatsnew Co-authored-by: Will Holmgren --- docs/sphinx/source/whatsnew/v0.9.0.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index cd433ebc14..87c1ac1e93 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -121,7 +121,7 @@ Enhancements :func:`~pvlib.iotools.get_pvgis_hourly` for reading and retrieving hourly solar radiation data and PV power output from PVGIS. (:pull:`1186`, :issue:`849`) -* Added `map_variables` option to :func:`~pvlib.iotools.get_pvgis_tmy` and +* Added ``map_variables`` option to :func:`~pvlib.iotools.get_pvgis_tmy` and :func:`~pvlib.iotools.read_pvgis_tmy` (:issue:`1250`, :pull:`1268`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` for retrieving and reading BSRN solar radiation data files. From 17f9138b6805fec5c3e3c219e85d9e28dc16301f Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 9 Aug 2021 20:06:09 +0200 Subject: [PATCH 30/35] Add double backticks in whatsnew --- docs/sphinx/source/whatsnew/v0.9.0.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index 87c1ac1e93..fe827148e8 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -53,11 +53,11 @@ Breaking changes :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` (:issue:`1245`, :pull:`1268`) -* Changed the naming of the inputs ``startdate``/``enddate`` to ``start``/``end`` in +* Changed the naming of the inputs `startdate`/`enddate` to `start`/`end` in :py:func:`~pvlib.iotools.get_ecmwf_macc` (:issue:`1245`, :pull:`1268`) -* Change the naming of the inputs ``lat``/``lon`` to ``latitude``/``longitude`` in +* Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in :py:func:`~pvlib.iotools.get_pvgis_tmy` (:issue:`1245`, :pull:`1268`) Deprecations @@ -121,7 +121,7 @@ Enhancements :func:`~pvlib.iotools.get_pvgis_hourly` for reading and retrieving hourly solar radiation data and PV power output from PVGIS. 
(:pull:`1186`, :issue:`849`) -* Added ``map_variables`` option to :func:`~pvlib.iotools.get_pvgis_tmy` and +* Added `map_variables` option to :func:`~pvlib.iotools.get_pvgis_tmy` and :func:`~pvlib.iotools.read_pvgis_tmy` (:issue:`1250`, :pull:`1268`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` for retrieving and reading BSRN solar radiation data files. From c8e5a6a6f703a8c2e6e9d333e30ed36d4e6c05ac Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 9 Aug 2021 20:28:58 +0200 Subject: [PATCH 31/35] Change fail version to 0.10 in test_modelchain --- pvlib/tests/test_modelchain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pvlib/tests/test_modelchain.py b/pvlib/tests/test_modelchain.py index e52bda72bd..86b7411d0f 100644 --- a/pvlib/tests/test_modelchain.py +++ b/pvlib/tests/test_modelchain.py @@ -1778,7 +1778,7 @@ def test_ModelChain_no_extra_kwargs(sapm_dc_snl_ac_system, location): ModelChain(sapm_dc_snl_ac_system, location, arbitrary_kwarg='value') -@fail_on_pvlib_version('1.0') +@fail_on_pvlib_version('0.10') def test_ModelChain_attributes_deprecated_10(sapm_dc_snl_ac_system, location): match = 'Use ModelChain.results' mc = ModelChain(sapm_dc_snl_ac_system, location) From 8c77b65c5667bf23a80ab7c82f87cf8ca0a76fea Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 9 Aug 2021 20:29:46 +0200 Subject: [PATCH 32/35] Coverage for deprecation warnings --- pvlib/tests/iotools/test_pvgis.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index 7d7876e370..3acccb42db 100644 --- a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -9,7 +9,9 @@ import requests from pvlib.iotools import get_pvgis_tmy, read_pvgis_tmy from pvlib.iotools import get_pvgis_hourly, read_pvgis_hourly -from ..conftest import DATA_DIR, RERUNS, RERUNS_DELAY, assert_frame_equal +from ..conftest import (DATA_DIR, RERUNS, RERUNS_DELAY, assert_frame_equal, + fail_on_pvlib_version) +from pvlib._deprecation import pvlibDeprecationWarning # PVGIS Hourly tests @@ -362,6 +364,17 @@ def pvgis_tmy_mapped_columns(): 'wind_speed', 'wind_direction', 'pressure'] +@fail_on_pvlib_version('0.10') +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_pvgis_tmy_variable_map_deprecating_warning_0_10(): + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + _ = get_pvgis_tmy(45, 8) + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.epw' + _ = read_pvgis_tmy(fn) + + @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_pvgis_tmy(expected, month_year_expected, inputs_expected, From 351f549ab31afecf4f22bfd0d29e8aa472addf4d Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 9 Aug 2021 20:34:10 +0200 Subject: [PATCH 33/35] Fix doublebackticks in whatsnew --- docs/sphinx/source/whatsnew/v0.9.0.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index fe827148e8..87c1ac1e93 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -53,11 +53,11 @@ Breaking changes :py:func:`~pvlib.iotools.get_psm3` and :py:func:`~pvlib.iotools.read_psm3` (:issue:`1245`, :pull:`1268`) -* Changed the naming of the inputs `startdate`/`enddate` to `start`/`end` in 
+* Changed the naming of the inputs ``startdate``/``enddate`` to ``start``/``end`` in :py:func:`~pvlib.iotools.get_ecmwf_macc` (:issue:`1245`, :pull:`1268`) -* Change the naming of the inputs `lat`/`lon` to `latitude`/`longitude` in +* Change the naming of the inputs ``lat``/``lon`` to ``latitude``/``longitude`` in :py:func:`~pvlib.iotools.get_pvgis_tmy` (:issue:`1245`, :pull:`1268`) Deprecations @@ -121,7 +121,7 @@ Enhancements :func:`~pvlib.iotools.get_pvgis_hourly` for reading and retrieving hourly solar radiation data and PV power output from PVGIS. (:pull:`1186`, :issue:`849`) -* Added `map_variables` option to :func:`~pvlib.iotools.get_pvgis_tmy` and +* Added ``map_variables`` option to :func:`~pvlib.iotools.get_pvgis_tmy` and :func:`~pvlib.iotools.read_pvgis_tmy` (:issue:`1250`, :pull:`1268`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` for retrieving and reading BSRN solar radiation data files. From 0aee6a855b9850ac90e6011883fee0be0da75145 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 9 Aug 2021 20:38:20 +0200 Subject: [PATCH 34/35] Fix stickler --- pvlib/tests/iotools/test_pvgis.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py index 3acccb42db..5a097d25e5 100644 --- a/pvlib/tests/iotools/test_pvgis.py +++ b/pvlib/tests/iotools/test_pvgis.py @@ -368,9 +368,9 @@ def pvgis_tmy_mapped_columns(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_pvgis_tmy_variable_map_deprecating_warning_0_10(): - with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): _ = get_pvgis_tmy(45, 8) - with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): + with pytest.warns(pvlibDeprecationWarning, match='names will be renamed'): fn = DATA_DIR / 'tmy_45.000_8.000_2005_2016.epw' _ = read_pvgis_tmy(fn) From 6276ccc9872917167fc9a0ee7c15a4a97d90bc49 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 9 Aug 2021 20:40:47 +0200 Subject: [PATCH 35/35] Replace tab with spaces in introtutorial --- docs/sphinx/source/introtutorial.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/source/introtutorial.rst b/docs/sphinx/source/introtutorial.rst index fe79d396a4..b4600e633d 100644 --- a/docs/sphinx/source/introtutorial.rst +++ b/docs/sphinx/source/introtutorial.rst @@ -58,7 +58,7 @@ includes irradiation, temperature and wind speed. for location in coordinates: latitude, longitude, name, altitude, timezone = location weather = pvlib.iotools.get_pvgis_tmy(latitude, longitude, - map_variables=True)[0] + map_variables=True)[0] weather.index.name = "utc_time" tmys.append(weather)
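Taken together, the PSM3 changes in this series settle on the ``(data, metadata)`` return order for ``get_psm3``, ``read_psm3``, and ``parse_psm3``. A minimal sketch of the resulting call pattern is shown below; the file name is hypothetical and only the ordering of the return values is the point.

    import pvlib

    # Data first, metadata second (the order was reversed in pvlib < 0.9.0).
    data, metadata = pvlib.iotools.read_psm3('psm3_sample.csv')  # hypothetical file

    # Metadata keys follow the NSRDB header fields, e.g. 'Local Time Zone'.
    print(metadata['Local Time Zone'])
    print(data.head())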