From 9e3ad63cdb030c6b369d9d822469bb968e2d1804 Mon Sep 17 00:00:00 2001 From: jbrockmendel Date: Sun, 12 Nov 2017 13:04:46 -0800 Subject: [PATCH] Move normalization funcs up to conversion (#18086) closes #17944 --- pandas/_libs/groupby.pyx | 1 + pandas/_libs/tslib.pyx | 149 +-------------- pandas/_libs/tslibs/conversion.pxd | 2 + pandas/_libs/tslibs/conversion.pyx | 239 ++++++++++++++++++++++--- pandas/_libs/tslibs/offsets.pyx | 4 +- pandas/core/indexes/datetimes.py | 33 ++-- pandas/tests/scalar/test_timestamp.py | 23 +-- pandas/tests/tseries/test_timezones.py | 22 +-- pandas/tseries/frequencies.py | 4 +- 9 files changed, 264 insertions(+), 213 deletions(-) diff --git a/pandas/_libs/groupby.pyx b/pandas/_libs/groupby.pyx index 2fbbc81c4b5a1..e1312a40971f0 100644 --- a/pandas/_libs/groupby.pyx +++ b/pandas/_libs/groupby.pyx @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # cython: profile=False cimport numpy as cnp diff --git a/pandas/_libs/tslib.pyx b/pandas/_libs/tslib.pyx index bf22a3a528259..b5285d158b1ed 100644 --- a/pandas/_libs/tslib.pyx +++ b/pandas/_libs/tslib.pyx @@ -97,9 +97,8 @@ from tslibs.conversion cimport (tz_convert_single, _TSObject, convert_to_tsobject, convert_datetime_to_tsobject, get_datetime64_nanos) -from tslibs.conversion import ( - tz_localize_to_utc, tz_convert, - tz_convert_single) +from tslibs.conversion import (tz_localize_to_utc, + tz_convert_single, date_normalize) from tslibs.nattype import NaT, nat_strings from tslibs.nattype cimport _checknull_with_nat @@ -1849,26 +1848,6 @@ cdef inline _to_i8(object val): return val -cpdef pydt_to_i8(object pydt): - """ - Convert to int64 representation compatible with numpy datetime64; converts - to UTC - """ - cdef: - _TSObject ts - - ts = convert_to_tsobject(pydt, None, None, 0, 0) - - return ts.value - - -def i8_to_pydt(int64_t i8, object tzinfo=None): - """ - Inverse of pydt_to_i8 - """ - return Timestamp(i8) - - # ---------------------------------------------------------------------- # Accessors @@ -1892,130 +1871,6 @@ def get_time_micros(ndarray[int64_t] dtindex): return micros -cdef int64_t DAY_NS = 86400000000000LL - - -@cython.wraparound(False) -@cython.boundscheck(False) -def date_normalize(ndarray[int64_t] stamps, tz=None): - cdef: - Py_ssize_t i, n = len(stamps) - pandas_datetimestruct dts - ndarray[int64_t] result = np.empty(n, dtype=np.int64) - - if tz is not None: - tz = maybe_get_tz(tz) - result = _normalize_local(stamps, tz) - else: - with nogil: - for i in range(n): - if stamps[i] == NPY_NAT: - result[i] = NPY_NAT - continue - dt64_to_dtstruct(stamps[i], &dts) - result[i] = _normalized_stamp(&dts) - - return result - - -@cython.wraparound(False) -@cython.boundscheck(False) -cdef _normalize_local(ndarray[int64_t] stamps, object tz): - cdef: - Py_ssize_t n = len(stamps) - ndarray[int64_t] result = np.empty(n, dtype=np.int64) - ndarray[int64_t] trans, deltas, pos - pandas_datetimestruct dts - - if is_utc(tz): - with nogil: - for i in range(n): - if stamps[i] == NPY_NAT: - result[i] = NPY_NAT - continue - dt64_to_dtstruct(stamps[i], &dts) - result[i] = _normalized_stamp(&dts) - elif is_tzlocal(tz): - for i in range(n): - if stamps[i] == NPY_NAT: - result[i] = NPY_NAT - continue - dt64_to_dtstruct(stamps[i], &dts) - dt = datetime(dts.year, dts.month, dts.day, dts.hour, - dts.min, dts.sec, dts.us, tz) - delta = int(get_utcoffset(tz, dt).total_seconds()) * 1000000000 - dt64_to_dtstruct(stamps[i] + delta, &dts) - result[i] = _normalized_stamp(&dts) - else: - # Adjust datetime64 timestamp, recompute datetimestruct - 
trans, deltas, typ = get_dst_info(tz) - - _pos = trans.searchsorted(stamps, side='right') - 1 - if _pos.dtype != np.int64: - _pos = _pos.astype(np.int64) - pos = _pos - - # statictzinfo - if typ not in ['pytz', 'dateutil']: - for i in range(n): - if stamps[i] == NPY_NAT: - result[i] = NPY_NAT - continue - dt64_to_dtstruct(stamps[i] + deltas[0], &dts) - result[i] = _normalized_stamp(&dts) - else: - for i in range(n): - if stamps[i] == NPY_NAT: - result[i] = NPY_NAT - continue - dt64_to_dtstruct(stamps[i] + deltas[pos[i]], &dts) - result[i] = _normalized_stamp(&dts) - - return result - -cdef inline int64_t _normalized_stamp(pandas_datetimestruct *dts) nogil: - dts.hour = 0 - dts.min = 0 - dts.sec = 0 - dts.us = 0 - dts.ps = 0 - return dtstruct_to_dt64(dts) - - -def dates_normalized(ndarray[int64_t] stamps, tz=None): - cdef: - Py_ssize_t i, n = len(stamps) - ndarray[int64_t] trans, deltas - pandas_datetimestruct dts - - if tz is None or is_utc(tz): - for i in range(n): - dt64_to_dtstruct(stamps[i], &dts) - if (dts.hour + dts.min + dts.sec + dts.us) > 0: - return False - elif is_tzlocal(tz): - for i in range(n): - dt64_to_dtstruct(stamps[i], &dts) - dt = datetime(dts.year, dts.month, dts.day, dts.hour, dts.min, - dts.sec, dts.us, tz) - dt = dt + tz.utcoffset(dt) - if (dt.hour + dt.minute + dt.second + dt.microsecond) > 0: - return False - else: - trans, deltas, typ = get_dst_info(tz) - - for i in range(n): - # Adjust datetime64 timestamp, recompute datetimestruct - pos = trans.searchsorted(stamps[i]) - 1 - inf = tz._transition_info[pos] - - dt64_to_dtstruct(stamps[i] + deltas[pos], &dts) - if (dts.hour + dts.min + dts.sec + dts.us) > 0: - return False - - return True - - # ---------------------------------------------------------------------- # Some general helper functions diff --git a/pandas/_libs/tslibs/conversion.pxd b/pandas/_libs/tslibs/conversion.pxd index 843a688a2630c..ad817ce8852f2 100644 --- a/pandas/_libs/tslibs/conversion.pxd +++ b/pandas/_libs/tslibs/conversion.pxd @@ -26,3 +26,5 @@ cdef void _localize_tso(_TSObject obj, object tz) cpdef int64_t tz_convert_single(int64_t val, object tz1, object tz2) cdef int64_t get_datetime64_nanos(object val) except? -1 + +cpdef int64_t pydt_to_i8(object pydt) except? -1 diff --git a/pandas/_libs/tslibs/conversion.pyx b/pandas/_libs/tslibs/conversion.pyx index 3775ab3417b63..c175769dc725e 100644 --- a/pandas/_libs/tslibs/conversion.pyx +++ b/pandas/_libs/tslibs/conversion.pyx @@ -53,7 +53,6 @@ UTC = pytz.UTC # ---------------------------------------------------------------------- # Misc Helpers - # TODO: How to declare np.datetime64 as the input type? cdef inline int64_t get_datetime64_nanos(object val) except? -1: """ @@ -90,6 +89,27 @@ cdef class _TSObject: return self.value +cpdef int64_t pydt_to_i8(object pydt) except? 
-1: + """ + Convert to int64 representation compatible with numpy datetime64; converts + to UTC + + Parameters + ---------- + pydt : object + + Returns + ------- + i8value : np.int64 + """ + cdef: + _TSObject ts + + ts = convert_to_tsobject(pydt, None, None, 0, 0) + + return ts.value + + cdef convert_to_tsobject(object ts, object tz, object unit, bint dayfirst, bint yearfirst): """ @@ -334,18 +354,18 @@ cdef inline void _localize_tso(_TSObject obj, object tz): Py_ssize_t delta, posn datetime dt + assert obj.tzinfo is None + if is_utc(tz): - obj.tzinfo = tz + pass + elif obj.value == NPY_NAT: + pass elif is_tzlocal(tz): dt64_to_dtstruct(obj.value, &obj.dts) dt = datetime(obj.dts.year, obj.dts.month, obj.dts.day, obj.dts.hour, obj.dts.min, obj.dts.sec, obj.dts.us, tz) delta = int(get_utcoffset(tz, dt).total_seconds()) * 1000000000 - if obj.value != NPY_NAT: - dt64_to_dtstruct(obj.value + delta, &obj.dts) - else: - dt64_to_dtstruct(obj.value, &obj.dts) - obj.tzinfo = tz + dt64_to_dtstruct(obj.value + delta, &obj.dts) else: # Adjust datetime64 timestamp, recompute datetimestruct trans, deltas, typ = get_dst_info(tz) @@ -355,26 +375,17 @@ cdef inline void _localize_tso(_TSObject obj, object tz): # static/pytz/dateutil specific code if is_fixed_offset(tz): # statictzinfo - if len(deltas) > 0 and obj.value != NPY_NAT: - dt64_to_dtstruct(obj.value + deltas[0], &obj.dts) - else: - dt64_to_dtstruct(obj.value, &obj.dts) - obj.tzinfo = tz + assert len(deltas) == 1, len(deltas) + dt64_to_dtstruct(obj.value + deltas[0], &obj.dts) elif treat_tz_as_pytz(tz): - inf = tz._transition_info[pos] - if obj.value != NPY_NAT: - dt64_to_dtstruct(obj.value + deltas[pos], &obj.dts) - else: - dt64_to_dtstruct(obj.value, &obj.dts) - obj.tzinfo = tz._tzinfos[inf] + tz = tz._tzinfos[tz._transition_info[pos]] + dt64_to_dtstruct(obj.value + deltas[pos], &obj.dts) elif treat_tz_as_dateutil(tz): - if obj.value != NPY_NAT: - dt64_to_dtstruct(obj.value + deltas[pos], &obj.dts) - else: - dt64_to_dtstruct(obj.value, &obj.dts) - obj.tzinfo = tz + dt64_to_dtstruct(obj.value + deltas[pos], &obj.dts) else: - obj.tzinfo = tz + pass + + obj.tzinfo = tz cdef inline datetime _localize_pydatetime(datetime dt, tzinfo tz): @@ -785,3 +796,183 @@ cdef inline str _render_tstamp(int64_t val): """ Helper function to render exception messages""" from pandas._libs.tslib import Timestamp return str(Timestamp(val)) + + +# ---------------------------------------------------------------------- +# Normalization + +@cython.wraparound(False) +@cython.boundscheck(False) +def date_normalize(ndarray[int64_t] stamps, tz=None): + """ + Normalize each of the (nanosecond) timestamps in the given array by + rounding down to the beginning of the day (i.e. midnight). If `tz` + is not None, then this is midnight for this timezone. 
+ + Parameters + ---------- + stamps : int64 ndarray + tz : tzinfo or None + + Returns + ------- + result : int64 ndarray of converted of normalized nanosecond timestamps + """ + cdef: + Py_ssize_t i, n = len(stamps) + pandas_datetimestruct dts + ndarray[int64_t] result = np.empty(n, dtype=np.int64) + + if tz is not None: + tz = maybe_get_tz(tz) + result = _normalize_local(stamps, tz) + else: + with nogil: + for i in range(n): + if stamps[i] == NPY_NAT: + result[i] = NPY_NAT + continue + dt64_to_dtstruct(stamps[i], &dts) + result[i] = _normalized_stamp(&dts) + + return result + + +@cython.wraparound(False) +@cython.boundscheck(False) +cdef ndarray[int64_t] _normalize_local(ndarray[int64_t] stamps, object tz): + """ + Normalize each of the (nanosecond) timestamps in the given array by + rounding down to the beginning of the day (i.e. midnight) for the + given timezone `tz`. + + Parameters + ---------- + stamps : int64 ndarray + tz : tzinfo or None + + Returns + ------- + result : int64 ndarray of converted of normalized nanosecond timestamps + """ + cdef: + Py_ssize_t n = len(stamps) + ndarray[int64_t] result = np.empty(n, dtype=np.int64) + ndarray[int64_t] trans, deltas, pos + pandas_datetimestruct dts + datetime dt + + if is_utc(tz): + with nogil: + for i in range(n): + if stamps[i] == NPY_NAT: + result[i] = NPY_NAT + continue + dt64_to_dtstruct(stamps[i], &dts) + result[i] = _normalized_stamp(&dts) + elif is_tzlocal(tz): + for i in range(n): + if stamps[i] == NPY_NAT: + result[i] = NPY_NAT + continue + dt64_to_dtstruct(stamps[i], &dts) + dt = datetime(dts.year, dts.month, dts.day, dts.hour, + dts.min, dts.sec, dts.us, tz) + delta = int(get_utcoffset(tz, dt).total_seconds()) * 1000000000 + dt64_to_dtstruct(stamps[i] + delta, &dts) + result[i] = _normalized_stamp(&dts) + else: + # Adjust datetime64 timestamp, recompute datetimestruct + trans, deltas, typ = get_dst_info(tz) + + _pos = trans.searchsorted(stamps, side='right') - 1 + if _pos.dtype != np.int64: + _pos = _pos.astype(np.int64) + pos = _pos + + # statictzinfo + if typ not in ['pytz', 'dateutil']: + for i in range(n): + if stamps[i] == NPY_NAT: + result[i] = NPY_NAT + continue + dt64_to_dtstruct(stamps[i] + deltas[0], &dts) + result[i] = _normalized_stamp(&dts) + else: + for i in range(n): + if stamps[i] == NPY_NAT: + result[i] = NPY_NAT + continue + dt64_to_dtstruct(stamps[i] + deltas[pos[i]], &dts) + result[i] = _normalized_stamp(&dts) + + return result + + +cdef inline int64_t _normalized_stamp(pandas_datetimestruct *dts) nogil: + """ + Normalize the given datetimestruct to midnight, then convert to int64_t. + + Parameters + ---------- + *dts : pointer to pandas_datetimestruct + + Returns + ------- + stamp : int64 + """ + dts.hour = 0 + dts.min = 0 + dts.sec = 0 + dts.us = 0 + dts.ps = 0 + return dtstruct_to_dt64(dts) + + +def is_date_array_normalized(ndarray[int64_t] stamps, tz=None): + """ + Check if all of the given (nanosecond) timestamps are normalized to + midnight, i.e. hour == minute == second == 0. If the optional timezone + `tz` is not None, then this is midnight for this timezone. 
+ + Parameters + ---------- + stamps : int64 ndarray + tz : tzinfo or None + + Returns + ------- + is_normalized : bool True if all stamps are normalized + """ + cdef: + Py_ssize_t i, n = len(stamps) + ndarray[int64_t] trans, deltas + pandas_datetimestruct dts + datetime dt + + if tz is None or is_utc(tz): + for i in range(n): + dt64_to_dtstruct(stamps[i], &dts) + if (dts.hour + dts.min + dts.sec + dts.us) > 0: + return False + elif is_tzlocal(tz): + for i in range(n): + dt64_to_dtstruct(stamps[i], &dts) + dt = datetime(dts.year, dts.month, dts.day, dts.hour, dts.min, + dts.sec, dts.us, tz) + dt = dt + tz.utcoffset(dt) + if (dt.hour + dt.minute + dt.second + dt.microsecond) > 0: + return False + else: + trans, deltas, typ = get_dst_info(tz) + + for i in range(n): + # Adjust datetime64 timestamp, recompute datetimestruct + pos = trans.searchsorted(stamps[i]) - 1 + inf = tz._transition_info[pos] + + dt64_to_dtstruct(stamps[i] + deltas[pos], &dts) + if (dts.hour + dts.min + dts.sec + dts.us) > 0: + return False + + return True diff --git a/pandas/_libs/tslibs/offsets.pyx b/pandas/_libs/tslibs/offsets.pyx index c64b6568a0495..2d8ce4c59fedc 100644 --- a/pandas/_libs/tslibs/offsets.pyx +++ b/pandas/_libs/tslibs/offsets.pyx @@ -15,10 +15,10 @@ np.import_array() from util cimport is_string_object, is_integer_object -from pandas._libs.tslib import pydt_to_i8, monthrange +from pandas._libs.tslib import monthrange +from conversion cimport tz_convert_single, pydt_to_i8 from frequencies cimport get_freq_code -from conversion cimport tz_convert_single # --------------------------------------------------------------------- # Constants diff --git a/pandas/core/indexes/datetimes.py b/pandas/core/indexes/datetimes.py index aa99e8920d9b5..2e022cb151008 100644 --- a/pandas/core/indexes/datetimes.py +++ b/pandas/core/indexes/datetimes.py @@ -55,8 +55,7 @@ from pandas._libs import (lib, index as libindex, tslib as libts, algos as libalgos, join as libjoin, Timestamp, period as libperiod) -from pandas._libs.tslibs import timezones - +from pandas._libs.tslibs import timezones, conversion # -------- some conversion wrapper functions @@ -384,8 +383,8 @@ def __new__(cls, data=None, getattr(data, 'tz', None) is None): # Convert tz-naive to UTC ints = subarr.view('i8') - subarr = libts.tz_localize_to_utc(ints, tz, - ambiguous=ambiguous) + subarr = conversion.tz_localize_to_utc(ints, tz, + ambiguous=ambiguous) subarr = subarr.view(_NS_DTYPE) subarr = cls._simple_new(subarr, name=name, freq=freq, tz=tz) @@ -531,8 +530,8 @@ def _generate(cls, start, end, periods, name, offset, index = _generate_regular_range(start, end, periods, offset) if tz is not None and getattr(index, 'tz', None) is None: - index = libts.tz_localize_to_utc(_ensure_int64(index), tz, - ambiguous=ambiguous) + index = conversion.tz_localize_to_utc(_ensure_int64(index), tz, + ambiguous=ambiguous) index = index.view(_NS_DTYPE) # index is localized datetime64 array -> have to convert @@ -561,11 +560,11 @@ def _convert_for_op(self, value): def _local_timestamps(self): if self.is_monotonic: - return libts.tz_convert(self.asi8, utc, self.tz) + return conversion.tz_convert(self.asi8, utc, self.tz) else: values = self.asi8 indexer = values.argsort() - result = libts.tz_convert(values.take(indexer), utc, self.tz) + result = conversion.tz_convert(values.take(indexer), utc, self.tz) n = len(indexer) reverse = np.empty(n, dtype=np.int_) @@ -1644,7 +1643,7 @@ def normalize(self): ------- normalized : DatetimeIndex """ - new_values = libts.date_normalize(self.asi8, 
self.tz) + new_values = conversion.date_normalize(self.asi8, self.tz) return DatetimeIndex(new_values, freq='infer', name=self.name, tz=self.tz) @@ -1683,7 +1682,7 @@ def is_normalized(self): """ Returns True if all of the dates are at midnight ("no time") """ - return libts.dates_normalized(self.asi8, self.tz) + return conversion.is_date_array_normalized(self.asi8, self.tz) @cache_readonly def _resolution(self): @@ -1724,7 +1723,7 @@ def insert(self, loc, item): new_dates = np.concatenate((self[:loc].asi8, [item.view(np.int64)], self[loc:].asi8)) if self.tz is not None: - new_dates = libts.tz_convert(new_dates, 'UTC', self.tz) + new_dates = conversion.tz_convert(new_dates, 'UTC', self.tz) return DatetimeIndex(new_dates, name=self.name, freq=freq, tz=self.tz) @@ -1764,7 +1763,7 @@ def delete(self, loc): freq = self.freq if self.tz is not None: - new_dates = libts.tz_convert(new_dates, 'UTC', self.tz) + new_dates = conversion.tz_convert(new_dates, 'UTC', self.tz) return DatetimeIndex(new_dates, name=self.name, freq=freq, tz=self.tz) def tz_convert(self, tz): @@ -1844,16 +1843,16 @@ def tz_localize(self, tz, ambiguous='raise', errors='raise'): """ if self.tz is not None: if tz is None: - new_dates = libts.tz_convert(self.asi8, 'UTC', self.tz) + new_dates = conversion.tz_convert(self.asi8, 'UTC', self.tz) else: raise TypeError("Already tz-aware, use tz_convert to convert.") else: tz = timezones.maybe_get_tz(tz) # Convert to UTC - new_dates = libts.tz_localize_to_utc(self.asi8, tz, - ambiguous=ambiguous, - errors=errors) + new_dates = conversion.tz_localize_to_utc(self.asi8, tz, + ambiguous=ambiguous, + errors=errors) new_dates = new_dates.view(_NS_DTYPE) return self._shallow_copy(new_dates, tz=tz) @@ -2194,7 +2193,7 @@ def _to_m8(key, tz=None): # this also converts strings key = Timestamp(key, tz=tz) - return np.int64(libts.pydt_to_i8(key)).view(_NS_DTYPE) + return np.int64(conversion.pydt_to_i8(key)).view(_NS_DTYPE) _CACHE_START = Timestamp(datetime(1950, 1, 1)) diff --git a/pandas/tests/scalar/test_timestamp.py b/pandas/tests/scalar/test_timestamp.py index 1d1eeb9da2364..a79fb554f9454 100644 --- a/pandas/tests/scalar/test_timestamp.py +++ b/pandas/tests/scalar/test_timestamp.py @@ -16,8 +16,9 @@ import pandas.util.testing as tm from pandas.tseries import offsets, frequencies -from pandas._libs import tslib, period +from pandas._libs import period from pandas._libs.tslibs.timezones import get_timezone +from pandas._libs.tslibs import conversion from pandas.compat import lrange, long, PY3 from pandas.util.testing import assert_series_equal @@ -77,12 +78,12 @@ def test_constructor(self): for result in [Timestamp(date_str), Timestamp(date)]: # only with timestring assert result.value == expected - assert tslib.pydt_to_i8(result) == expected + assert conversion.pydt_to_i8(result) == expected # re-creation shouldn't affect to internal value result = Timestamp(result) assert result.value == expected - assert tslib.pydt_to_i8(result) == expected + assert conversion.pydt_to_i8(result) == expected # with timezone for tz, offset in timezones: @@ -90,18 +91,18 @@ def test_constructor(self): tz=tz)]: expected_tz = expected - offset * 3600 * 1000000000 assert result.value == expected_tz - assert tslib.pydt_to_i8(result) == expected_tz + assert conversion.pydt_to_i8(result) == expected_tz # should preserve tz result = Timestamp(result) assert result.value == expected_tz - assert tslib.pydt_to_i8(result) == expected_tz + assert conversion.pydt_to_i8(result) == expected_tz # should convert to UTC result = 
Timestamp(result, tz='UTC') expected_utc = expected - offset * 3600 * 1000000000 assert result.value == expected_utc - assert tslib.pydt_to_i8(result) == expected_utc + assert conversion.pydt_to_i8(result) == expected_utc def test_constructor_with_stringoffset(self): # GH 7833 @@ -129,30 +130,30 @@ def test_constructor_with_stringoffset(self): for result in [Timestamp(date_str)]: # only with timestring assert result.value == expected - assert tslib.pydt_to_i8(result) == expected + assert conversion.pydt_to_i8(result) == expected # re-creation shouldn't affect to internal value result = Timestamp(result) assert result.value == expected - assert tslib.pydt_to_i8(result) == expected + assert conversion.pydt_to_i8(result) == expected # with timezone for tz, offset in timezones: result = Timestamp(date_str, tz=tz) expected_tz = expected assert result.value == expected_tz - assert tslib.pydt_to_i8(result) == expected_tz + assert conversion.pydt_to_i8(result) == expected_tz # should preserve tz result = Timestamp(result) assert result.value == expected_tz - assert tslib.pydt_to_i8(result) == expected_tz + assert conversion.pydt_to_i8(result) == expected_tz # should convert to UTC result = Timestamp(result, tz='UTC') expected_utc = expected assert result.value == expected_utc - assert tslib.pydt_to_i8(result) == expected_utc + assert conversion.pydt_to_i8(result) == expected_utc # This should be 2013-11-01 05:00 in UTC # converted to Chicago tz diff --git a/pandas/tests/tseries/test_timezones.py b/pandas/tests/tseries/test_timezones.py index 724628649796d..3dfad2d4af75e 100644 --- a/pandas/tests/tseries/test_timezones.py +++ b/pandas/tests/tseries/test_timezones.py @@ -17,7 +17,7 @@ from pandas.core.indexes.datetimes import bdate_range, date_range from pandas.core.dtypes.dtypes import DatetimeTZDtype from pandas._libs import tslib -from pandas._libs.tslibs import timezones +from pandas._libs.tslibs import timezones, conversion from pandas import (Index, Series, DataFrame, isna, Timestamp, NaT, DatetimeIndex, to_datetime) from pandas.util.testing import (assert_frame_equal, assert_series_equal, @@ -1738,14 +1738,14 @@ class TestTslib(object): def test_tslib_tz_convert(self): def compare_utc_to_local(tz_didx, utc_didx): - f = lambda x: tslib.tz_convert_single(x, 'UTC', tz_didx.tz) - result = tslib.tz_convert(tz_didx.asi8, 'UTC', tz_didx.tz) + f = lambda x: conversion.tz_convert_single(x, 'UTC', tz_didx.tz) + result = conversion.tz_convert(tz_didx.asi8, 'UTC', tz_didx.tz) result_single = np.vectorize(f)(tz_didx.asi8) tm.assert_numpy_array_equal(result, result_single) def compare_local_to_utc(tz_didx, utc_didx): - f = lambda x: tslib.tz_convert_single(x, tz_didx.tz, 'UTC') - result = tslib.tz_convert(utc_didx.asi8, tz_didx.tz, 'UTC') + f = lambda x: conversion.tz_convert_single(x, tz_didx.tz, 'UTC') + result = conversion.tz_convert(utc_didx.asi8, tz_didx.tz, 'UTC') result_single = np.vectorize(f)(utc_didx.asi8) tm.assert_numpy_array_equal(result, result_single) @@ -1770,14 +1770,14 @@ def compare_local_to_utc(tz_didx, utc_didx): compare_local_to_utc(tz_didx, utc_didx) # Check empty array - result = tslib.tz_convert(np.array([], dtype=np.int64), - timezones.maybe_get_tz('US/Eastern'), - timezones.maybe_get_tz('Asia/Tokyo')) + result = conversion.tz_convert(np.array([], dtype=np.int64), + timezones.maybe_get_tz('US/Eastern'), + timezones.maybe_get_tz('Asia/Tokyo')) tm.assert_numpy_array_equal(result, np.array([], dtype=np.int64)) # Check all-NaT array - result = tslib.tz_convert(np.array([tslib.iNaT], 
dtype=np.int64), - timezones.maybe_get_tz('US/Eastern'), - timezones.maybe_get_tz('Asia/Tokyo')) + result = conversion.tz_convert(np.array([tslib.iNaT], dtype=np.int64), + timezones.maybe_get_tz('US/Eastern'), + timezones.maybe_get_tz('Asia/Tokyo')) tm.assert_numpy_array_equal(result, np.array( [tslib.iNaT], dtype=np.int64)) diff --git a/pandas/tseries/frequencies.py b/pandas/tseries/frequencies.py index be25a439f9075..128dd51a2abea 100644 --- a/pandas/tseries/frequencies.py +++ b/pandas/tseries/frequencies.py @@ -21,6 +21,7 @@ from pandas._libs import lib, tslib from pandas._libs.tslib import Timedelta +from pandas._libs.tslibs import conversion from pandas._libs.tslibs.frequencies import ( # noqa get_freq_code, _base_and_stride, _period_str_to_code, _INVALID_FREQ_ERROR, opattern, _lite_rule_alias, _dont_uppercase, @@ -583,7 +584,8 @@ def __init__(self, index, warn=True): # the timezone so they are in local time if hasattr(index, 'tz'): if index.tz is not None: - self.values = tslib.tz_convert(self.values, 'UTC', index.tz) + self.values = conversion.tz_convert(self.values, + 'UTC', index.tz) self.warn = warn
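
Usage illustration (a minimal sketch, not part of the diff above; it assumes a
pandas build that includes this commit): after this patch the normalization and
scalar-conversion helpers are imported from pandas._libs.tslibs.conversion
rather than pandas._libs.tslib.

    import numpy as np
    from pandas import Timestamp, date_range
    from pandas._libs.tslibs import conversion

    # i8 view of a tz-naive DatetimeIndex: nanoseconds since the epoch
    stamps = date_range('2017-11-12 09:30', periods=3, freq='H').asi8

    # Round each stamp down to midnight; tz=None means naive/UTC handling
    normalized = conversion.date_normalize(stamps, tz=None)

    # The all-midnight check backing DatetimeIndex.is_normalized
    assert conversion.is_date_array_normalized(normalized, tz=None)

    # Scalar path: datetime-like scalar -> i8 nanosecond value (UTC)
    assert conversion.pydt_to_i8(Timestamp('2017-11-12')) == normalized[0]

Cython-level callers such as offsets.pyx now cimport pydt_to_i8 directly from
conversion, while pure-Python callers (pandas/core/indexes/datetimes.py,
pandas/tseries/frequencies.py) go through the `conversion` module import shown
here.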
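
For tz-naive input, the work done by date_normalize/_normalized_stamp amounts
to flooring each int64 nanosecond stamp to a whole number of days. A rough
NumPy-only equivalent (illustration only; it ignores NPY_NAT entries and
timezones, which the Cython version handles explicitly):

    import numpy as np

    # Nanoseconds per day; matches the DAY_NS constant removed from tslib.pyx
    DAY_NS = 86400000000000

    def naive_normalize(stamps):
        # Hypothetical helper, not part of pandas: floor tz-naive i8 stamps
        # to midnight, matching conversion.date_normalize(stamps, tz=None)
        # for arrays containing no NaT values.
        stamps = np.asarray(stamps, dtype=np.int64)
        return stamps - (stamps % DAY_NS)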