8th day of Python challenges 111-117
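The hunks below add several test modules copied from pandas' tslibs test suite. As a minimal, hedged sketch of how they might be run once saved into the repository (the "tests/tslibs" path is an assumption; the diff does not show the target file names), pytest can be invoked programmatically:

# Hypothetical runner sketch: assumes pytest, pandas, numpy, pytz and dateutil
# are installed and the copied modules live under tests/tslibs.
import sys

import pytest

if __name__ == "__main__":
    # pytest.main returns an exit code suitable for sys.exit.
    sys.exit(pytest.main(["-v", "tests/tslibs"]))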
@@ -0,0 +1,46 @@
"""Tests that the tslibs API is locked down"""

from pandas._libs import tslibs


def test_namespace():

    submodules = [
        "c_timestamp",
        "ccalendar",
        "conversion",
        "fields",
        "frequencies",
        "nattype",
        "np_datetime",
        "offsets",
        "parsing",
        "period",
        "resolution",
        "strptime",
        "timedeltas",
        "timestamps",
        "timezones",
        "tzconversion",
    ]

    api = [
        "NaT",
        "NaTType",
        "iNaT",
        "is_null_datetimelike",
        "OutOfBoundsDatetime",
        "Period",
        "IncompatibleFrequency",
        "Timedelta",
        "Timestamp",
        "delta_to_nanoseconds",
        "ints_to_pytimedelta",
        "localize_pydatetime",
        "normalize_date",
        "tz_convert_single",
    ]

    expected = set(submodules + api)
    names = [x for x in dir(tslibs) if not x.startswith("__")]
    assert set(names) == expected
@@ -0,0 +1,197 @@
from datetime import date, datetime

from dateutil.tz.tz import tzoffset
import numpy as np
import pytest
import pytz

from pandas._libs import iNaT, tslib
from pandas.compat.numpy import np_array_datetime64_compat

from pandas import Timestamp
import pandas.util.testing as tm


@pytest.mark.parametrize(
    "data,expected",
    [
        (
            ["01-01-2013", "01-02-2013"],
            [
                "2013-01-01T00:00:00.000000000-0000",
                "2013-01-02T00:00:00.000000000-0000",
            ],
        ),
        (
            ["Mon Sep 16 2013", "Tue Sep 17 2013"],
            [
                "2013-09-16T00:00:00.000000000-0000",
                "2013-09-17T00:00:00.000000000-0000",
            ],
        ),
    ],
)
def test_parsing_valid_dates(data, expected):
    arr = np.array(data, dtype=object)
    result, _ = tslib.array_to_datetime(arr)

    expected = np_array_datetime64_compat(expected, dtype="M8[ns]")
    tm.assert_numpy_array_equal(result, expected)


@pytest.mark.parametrize(
    "dt_string, expected_tz",
    [
        ["01-01-2013 08:00:00+08:00", 480],
        ["2013-01-01T08:00:00.000000000+0800", 480],
        ["2012-12-31T16:00:00.000000000-0800", -480],
        ["12-31-2012 23:00:00-01:00", -60],
    ],
)
def test_parsing_timezone_offsets(dt_string, expected_tz):
    # All of these datetime strings with offsets are equivalent
    # to the same datetime after the timezone offset is added.
    arr = np.array(["01-01-2013 00:00:00"], dtype=object)
    expected, _ = tslib.array_to_datetime(arr)

    arr = np.array([dt_string], dtype=object)
    result, result_tz = tslib.array_to_datetime(arr)

    tm.assert_numpy_array_equal(result, expected)
    assert result_tz is pytz.FixedOffset(expected_tz)


def test_parsing_non_iso_timezone_offset():
    dt_string = "01-01-2013T00:00:00.000000000+0000"
    arr = np.array([dt_string], dtype=object)

    result, result_tz = tslib.array_to_datetime(arr)
    expected = np.array([np.datetime64("2013-01-01 00:00:00.000000000")])

    tm.assert_numpy_array_equal(result, expected)
    assert result_tz is pytz.FixedOffset(0)


def test_parsing_different_timezone_offsets():
    # see gh-17697
    data = ["2015-11-18 15:30:00+05:30", "2015-11-18 15:30:00+06:30"]
    data = np.array(data, dtype=object)

    result, result_tz = tslib.array_to_datetime(data)
    expected = np.array(
        [
            datetime(2015, 11, 18, 15, 30, tzinfo=tzoffset(None, 19800)),
            datetime(2015, 11, 18, 15, 30, tzinfo=tzoffset(None, 23400)),
        ],
        dtype=object,
    )

    tm.assert_numpy_array_equal(result, expected)
    assert result_tz is None


@pytest.mark.parametrize(
    "data", [["-352.737091", "183.575577"], ["1", "2", "3", "4", "5"]]
)
def test_number_looking_strings_not_into_datetime(data):
    # see gh-4601
    #
    # These strings don't look like datetimes, so
    # they shouldn't be attempted to be converted.
    arr = np.array(data, dtype=object)
    result, _ = tslib.array_to_datetime(arr, errors="ignore")

    tm.assert_numpy_array_equal(result, arr)


@pytest.mark.parametrize(
    "invalid_date",
    [
        date(1000, 1, 1),
        datetime(1000, 1, 1),
        "1000-01-01",
        "Jan 1, 1000",
        np.datetime64("1000-01-01"),
    ],
)
@pytest.mark.parametrize("errors", ["coerce", "raise"])
def test_coerce_outside_ns_bounds(invalid_date, errors):
    arr = np.array([invalid_date], dtype="object")
    kwargs = dict(values=arr, errors=errors)

    if errors == "raise":
        msg = "Out of bounds nanosecond timestamp"

        with pytest.raises(ValueError, match=msg):
            tslib.array_to_datetime(**kwargs)
    else:  # coerce.
        result, _ = tslib.array_to_datetime(**kwargs)
        expected = np.array([iNaT], dtype="M8[ns]")

        tm.assert_numpy_array_equal(result, expected)


def test_coerce_outside_ns_bounds_one_valid():
    arr = np.array(["1/1/1000", "1/1/2000"], dtype=object)
    result, _ = tslib.array_to_datetime(arr, errors="coerce")

    expected = [iNaT, "2000-01-01T00:00:00.000000000-0000"]
    expected = np_array_datetime64_compat(expected, dtype="M8[ns]")

    tm.assert_numpy_array_equal(result, expected)


@pytest.mark.parametrize("errors", ["ignore", "coerce"])
def test_coerce_of_invalid_datetimes(errors):
    arr = np.array(["01-01-2013", "not_a_date", "1"], dtype=object)
    kwargs = dict(values=arr, errors=errors)

    if errors == "ignore":
        # Without coercing, the presence of any invalid
        # dates prevents any values from being converted.
        result, _ = tslib.array_to_datetime(**kwargs)
        tm.assert_numpy_array_equal(result, arr)
    else:  # coerce.
        # With coercing, the invalid dates become iNaT
        result, _ = tslib.array_to_datetime(arr, errors="coerce")
        expected = ["2013-01-01T00:00:00.000000000-0000", iNaT, iNaT]

        tm.assert_numpy_array_equal(
            result, np_array_datetime64_compat(expected, dtype="M8[ns]")
        )


def test_to_datetime_barely_out_of_bounds():
    # see gh-19382, gh-19529
    #
    # Close enough to bounds that dropping nanos
    # would result in an in-bounds datetime.
    arr = np.array(["2262-04-11 23:47:16.854775808"], dtype=object)
    msg = "Out of bounds nanosecond timestamp: 2262-04-11 23:47:16"

    with pytest.raises(tslib.OutOfBoundsDatetime, match=msg):
        tslib.array_to_datetime(arr)


class SubDatetime(datetime):
    pass


@pytest.mark.parametrize(
    "data,expected",
    [
        ([SubDatetime(2000, 1, 1)], ["2000-01-01T00:00:00.000000000-0000"]),
        ([datetime(2000, 1, 1)], ["2000-01-01T00:00:00.000000000-0000"]),
        ([Timestamp(2000, 1, 1)], ["2000-01-01T00:00:00.000000000-0000"]),
    ],
)
def test_datetime_subclass(data, expected):
    # GH 25851
    # ensure that subclassed datetime works with
    # array_to_datetime

    arr = np.array(data, dtype=object)
    result, _ = tslib.array_to_datetime(arr)

    expected = np_array_datetime64_compat(expected, dtype="M8[ns]")
    tm.assert_numpy_array_equal(result, expected)
@@ -0,0 +1,27 @@
from datetime import datetime

import numpy as np
import pytest

from pandas._libs.tslibs import ccalendar


@pytest.mark.parametrize(
    "date_tuple,expected",
    [
        ((2001, 3, 1), 60),
        ((2004, 3, 1), 61),
        ((1907, 12, 31), 365),  # End-of-year, non-leap year.
        ((2004, 12, 31), 366),  # End-of-year, leap year.
    ],
)
def test_get_day_of_year_numeric(date_tuple, expected):
    assert ccalendar.get_day_of_year(*date_tuple) == expected


def test_get_day_of_year_dt():
    dt = datetime.fromordinal(1 + np.random.randint(365 * 4000))
    result = ccalendar.get_day_of_year(dt.year, dt.month, dt.day)

    expected = (dt - dt.replace(month=1, day=1)).days + 1
    assert result == expected
@@ -0,0 +1,100 @@
from datetime import datetime

import numpy as np
import pytest
from pytz import UTC

from pandas._libs.tslib import iNaT
from pandas._libs.tslibs import conversion, timezones, tzconversion

from pandas import Timestamp, date_range
import pandas.util.testing as tm


def _compare_utc_to_local(tz_didx):
    def f(x):
        return conversion.tz_convert_single(x, UTC, tz_didx.tz)

    result = tzconversion.tz_convert(tz_didx.asi8, UTC, tz_didx.tz)
    expected = np.vectorize(f)(tz_didx.asi8)

    tm.assert_numpy_array_equal(result, expected)


def _compare_local_to_utc(tz_didx, utc_didx):
    def f(x):
        return conversion.tz_convert_single(x, tz_didx.tz, UTC)

    result = tzconversion.tz_convert(utc_didx.asi8, tz_didx.tz, UTC)
    expected = np.vectorize(f)(utc_didx.asi8)

    tm.assert_numpy_array_equal(result, expected)


def test_tz_convert_single_matches_tz_convert_hourly(tz_aware_fixture):
    tz = tz_aware_fixture
    tz_didx = date_range("2014-03-01", "2015-01-10", freq="H", tz=tz)
    utc_didx = date_range("2014-03-01", "2015-01-10", freq="H")

    _compare_utc_to_local(tz_didx)
    _compare_local_to_utc(tz_didx, utc_didx)


@pytest.mark.parametrize("freq", ["D", "A"])
def test_tz_convert_single_matches_tz_convert(tz_aware_fixture, freq):
    tz = tz_aware_fixture
    tz_didx = date_range("2000-01-01", "2020-01-01", freq=freq, tz=tz)
    utc_didx = date_range("2000-01-01", "2020-01-01", freq=freq)

    _compare_utc_to_local(tz_didx)
    _compare_local_to_utc(tz_didx, utc_didx)


@pytest.mark.parametrize(
    "arr",
    [
        pytest.param(np.array([], dtype=np.int64), id="empty"),
        pytest.param(np.array([iNaT], dtype=np.int64), id="all_nat"),
    ],
)
def test_tz_convert_corner(arr):
    result = tzconversion.tz_convert(
        arr, timezones.maybe_get_tz("US/Eastern"), timezones.maybe_get_tz("Asia/Tokyo")
    )
    tm.assert_numpy_array_equal(result, arr)


@pytest.mark.parametrize("copy", [True, False])
@pytest.mark.parametrize("dtype", ["M8[ns]", "M8[s]"])
def test_length_zero_copy(dtype, copy):
    arr = np.array([], dtype=dtype)
    result = conversion.ensure_datetime64ns(arr, copy=copy)
    assert result.base is (None if copy else arr)


class SubDatetime(datetime):
    pass


@pytest.mark.parametrize(
    "dt, expected",
    [
        pytest.param(
            Timestamp("2000-01-01"), Timestamp("2000-01-01", tz=UTC), id="timestamp"
        ),
        pytest.param(
            datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=UTC), id="datetime"
        ),
        pytest.param(
            SubDatetime(2000, 1, 1),
            SubDatetime(2000, 1, 1, tzinfo=UTC),
            id="subclassed_datetime",
        ),
    ],
)
def test_localize_pydatetime_dt_types(dt, expected):
    # GH 25851
    # ensure that subclassed datetime works with
    # localize_pydatetime
    result = conversion.localize_pydatetime(dt, UTC)
    assert result == expected
@@ -0,0 +1,104 @@
import pytest

from pandas._libs.tslibs.frequencies import (
    INVALID_FREQ_ERR_MSG,
    _period_str_to_code,
    get_rule_month,
    is_subperiod,
    is_superperiod,
)

from pandas.tseries import offsets


@pytest.mark.parametrize(
    "obj,expected",
    [
        ("W", "DEC"),
        (offsets.Week(), "DEC"),
        ("D", "DEC"),
        (offsets.Day(), "DEC"),
        ("Q", "DEC"),
        (offsets.QuarterEnd(startingMonth=12), "DEC"),
        ("Q-JAN", "JAN"),
        (offsets.QuarterEnd(startingMonth=1), "JAN"),
        ("A-DEC", "DEC"),
        ("Y-DEC", "DEC"),
        (offsets.YearEnd(), "DEC"),
        ("A-MAY", "MAY"),
        ("Y-MAY", "MAY"),
        (offsets.YearEnd(month=5), "MAY"),
    ],
)
def test_get_rule_month(obj, expected):
    result = get_rule_month(obj)
    assert result == expected


@pytest.mark.parametrize(
    "obj,expected",
    [
        ("A", 1000),
        ("A-DEC", 1000),
        ("A-JAN", 1001),
        ("Y", 1000),
        ("Y-DEC", 1000),
        ("Y-JAN", 1001),
        ("Q", 2000),
        ("Q-DEC", 2000),
        ("Q-FEB", 2002),
        ("W", 4000),
        ("W-SUN", 4000),
        ("W-FRI", 4005),
        ("Min", 8000),
        ("ms", 10000),
        ("US", 11000),
        ("NS", 12000),
    ],
)
def test_period_str_to_code(obj, expected):
    assert _period_str_to_code(obj) == expected


@pytest.mark.parametrize(
    "p1,p2,expected",
    [
        # Input validation.
        (offsets.MonthEnd(), None, False),
        (offsets.YearEnd(), None, False),
        (None, offsets.YearEnd(), False),
        (None, offsets.MonthEnd(), False),
        (None, None, False),
        (offsets.YearEnd(), offsets.MonthEnd(), True),
        (offsets.Hour(), offsets.Minute(), True),
        (offsets.Second(), offsets.Milli(), True),
        (offsets.Milli(), offsets.Micro(), True),
        (offsets.Micro(), offsets.Nano(), True),
    ],
)
def test_super_sub_symmetry(p1, p2, expected):
    assert is_superperiod(p1, p2) is expected
    assert is_subperiod(p2, p1) is expected


@pytest.mark.parametrize(
    "freq,expected,aliases",
    [
        ("D", 6000, ["DAY", "DLY", "DAILY"]),
        ("M", 3000, ["MTH", "MONTH", "MONTHLY"]),
        ("N", 12000, ["NANOSECOND", "NANOSECONDLY"]),
        ("H", 7000, ["HR", "HOUR", "HRLY", "HOURLY"]),
        ("T", 8000, ["minute", "MINUTE", "MINUTELY"]),
        ("L", 10000, ["MILLISECOND", "MILLISECONDLY"]),
        ("U", 11000, ["MICROSECOND", "MICROSECONDLY"]),
        ("S", 9000, ["sec", "SEC", "SECOND", "SECONDLY"]),
        ("B", 5000, ["BUS", "BUSINESS", "BUSINESSLY", "WEEKDAY"]),
    ],
)
def test_assert_aliases_deprecated(freq, expected, aliases):
    assert isinstance(aliases, list)
    assert _period_str_to_code(freq) == expected

    for alias in aliases:
        with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
            _period_str_to_code(alias)
@@ -0,0 +1,169 @@
"""
Tests for helper functions in the cython tslibs.offsets
"""
from datetime import datetime

import pytest

import pandas._libs.tslibs.offsets as liboffsets
from pandas._libs.tslibs.offsets import roll_qtrday

from pandas import Timestamp


@pytest.fixture(params=["start", "end", "business_start", "business_end"])
def day_opt(request):
    return request.param


@pytest.mark.parametrize(
    "dt,exp_week_day,exp_last_day",
    [
        (datetime(2017, 11, 30), 3, 30),  # Business day.
        (datetime(1993, 10, 31), 6, 29),  # Non-business day.
    ],
)
def test_get_last_bday(dt, exp_week_day, exp_last_day):
    assert dt.weekday() == exp_week_day
    assert liboffsets.get_lastbday(dt.year, dt.month) == exp_last_day


@pytest.mark.parametrize(
    "dt,exp_week_day,exp_first_day",
    [
        (datetime(2017, 4, 1), 5, 3),  # Non-weekday.
        (datetime(1993, 10, 1), 4, 1),  # Business day.
    ],
)
def test_get_first_bday(dt, exp_week_day, exp_first_day):
    assert dt.weekday() == exp_week_day
    assert liboffsets.get_firstbday(dt.year, dt.month) == exp_first_day


@pytest.mark.parametrize(
    "months,day_opt,expected",
    [
        (0, 15, datetime(2017, 11, 15)),
        (0, None, datetime(2017, 11, 30)),
        (1, "start", datetime(2017, 12, 1)),
        (-145, "end", datetime(2005, 10, 31)),
        (0, "business_end", datetime(2017, 11, 30)),
        (0, "business_start", datetime(2017, 11, 1)),
    ],
)
def test_shift_month_dt(months, day_opt, expected):
    dt = datetime(2017, 11, 30)
    assert liboffsets.shift_month(dt, months, day_opt=day_opt) == expected


@pytest.mark.parametrize(
    "months,day_opt,expected",
    [
        (1, "start", Timestamp("1929-06-01")),
        (-3, "end", Timestamp("1929-02-28")),
        (25, None, Timestamp("1931-06-5")),
        (-1, 31, Timestamp("1929-04-30")),
    ],
)
def test_shift_month_ts(months, day_opt, expected):
    ts = Timestamp("1929-05-05")
    assert liboffsets.shift_month(ts, months, day_opt=day_opt) == expected


def test_shift_month_error():
    dt = datetime(2017, 11, 15)
    day_opt = "this should raise"

    with pytest.raises(ValueError, match=day_opt):
        liboffsets.shift_month(dt, 3, day_opt=day_opt)


@pytest.mark.parametrize(
    "other,expected",
    [
        # Before March 1.
        (datetime(2017, 2, 10), {2: 1, -7: -7, 0: 0}),
        # After March 1.
        (Timestamp("2014-03-15", tz="US/Eastern"), {2: 2, -7: -6, 0: 1}),
    ],
)
@pytest.mark.parametrize("n", [2, -7, 0])
def test_roll_yearday(other, expected, n):
    month = 3
    day_opt = "start"  # `other` will be compared to March 1.

    assert liboffsets.roll_yearday(other, n, month, day_opt) == expected[n]


@pytest.mark.parametrize(
    "other,expected",
    [
        # Before June 30.
        (datetime(1999, 6, 29), {5: 4, -7: -7, 0: 0}),
        # After June 30.
        (Timestamp(2072, 8, 24, 6, 17, 18), {5: 5, -7: -6, 0: 1}),
    ],
)
@pytest.mark.parametrize("n", [5, -7, 0])
def test_roll_yearday2(other, expected, n):
    month = 6
    day_opt = "end"  # `other` will be compared to June 30.

    assert liboffsets.roll_yearday(other, n, month, day_opt) == expected[n]


def test_get_day_of_month_error():
    # get_day_of_month is not directly exposed.
    # We test it via roll_yearday.
    dt = datetime(2017, 11, 15)
    day_opt = "foo"

    with pytest.raises(ValueError, match=day_opt):
        # To hit the raising case we need month == dt.month and n > 0.
        liboffsets.roll_yearday(dt, n=3, month=11, day_opt=day_opt)


@pytest.mark.parametrize(
    "month",
    [3, 5],  # (other.month % 3) < (month % 3)  # (other.month % 3) > (month % 3)
)
@pytest.mark.parametrize("n", [4, -3])
def test_roll_qtr_day_not_mod_unequal(day_opt, month, n):
    expected = {3: {-3: -2, 4: 4}, 5: {-3: -3, 4: 3}}

    other = Timestamp(2072, 10, 1, 6, 17, 18)  # Saturday.
    assert roll_qtrday(other, n, month, day_opt, modby=3) == expected[month][n]


@pytest.mark.parametrize(
    "other,month,exp_dict",
    [
        # Monday.
        (datetime(1999, 5, 31), 2, {-1: {"start": 0, "business_start": 0}}),
        # Saturday.
        (
            Timestamp(2072, 10, 1, 6, 17, 18),
            4,
            {2: {"end": 1, "business_end": 1, "business_start": 1}},
        ),
        # First business day.
        (
            Timestamp(2072, 10, 3, 6, 17, 18),
            4,
            {2: {"end": 1, "business_end": 1}, -1: {"start": 0}},
        ),
    ],
)
@pytest.mark.parametrize("n", [2, -1])
def test_roll_qtr_day_mod_equal(other, month, exp_dict, n, day_opt):
    # All cases have (other.month % 3) == (month % 3).
    expected = exp_dict.get(n, {}).get(day_opt, n)
    assert roll_qtrday(other, n, month, day_opt, modby=3) == expected


@pytest.mark.parametrize(
    "n,expected", [(42, {29: 42, 1: 42, 31: 41}), (-4, {29: -4, 1: -3, 31: -4})]
)
@pytest.mark.parametrize("compare", [29, 1, 31])
def test_roll_convention(n, expected, compare):
    assert liboffsets.roll_convention(29, n, compare) == expected[compare]
@@ -0,0 +1,41 @@
"""Tests for functions from pandas._libs.tslibs"""

from datetime import date, datetime

import pytest

from pandas._libs import tslibs
from pandas._libs.tslibs.timestamps import Timestamp


@pytest.mark.parametrize(
    "value,expected",
    [
        (date(2012, 9, 7), datetime(2012, 9, 7)),
        (datetime(2012, 9, 7, 12), datetime(2012, 9, 7)),
        (datetime(2007, 10, 1, 1, 12, 5, 10), datetime(2007, 10, 1)),
    ],
)
def test_normalize_date(value, expected):
    result = tslibs.normalize_date(value)
    assert result == expected


class SubDatetime(datetime):
    pass


@pytest.mark.parametrize(
    "dt, expected",
    [
        (Timestamp(2000, 1, 1, 1), Timestamp(2000, 1, 1, 0)),
        (datetime(2000, 1, 1, 1), datetime(2000, 1, 1, 0)),
        (SubDatetime(2000, 1, 1, 1), SubDatetime(2000, 1, 1, 0)),
    ],
)
def test_normalize_date_sub_types(dt, expected):
    # GH 25851
    # ensure that subclassed datetime works with
    # normalize_date
    result = tslibs.normalize_date(dt)
    assert result == expected
@@ -0,0 +1,74 @@
from datetime import datetime

import pytest

from pandas._libs import tslib


@pytest.mark.parametrize(
    "date_str, exp",
    [
        ("2011-01-02", datetime(2011, 1, 2)),
        ("2011-1-2", datetime(2011, 1, 2)),
        ("2011-01", datetime(2011, 1, 1)),
        ("2011-1", datetime(2011, 1, 1)),
        ("2011 01 02", datetime(2011, 1, 2)),
        ("2011.01.02", datetime(2011, 1, 2)),
        ("2011/01/02", datetime(2011, 1, 2)),
        ("2011\\01\\02", datetime(2011, 1, 2)),
        ("2013-01-01 05:30:00", datetime(2013, 1, 1, 5, 30)),
        ("2013-1-1 5:30:00", datetime(2013, 1, 1, 5, 30)),
    ],
)
def test_parsers_iso8601(date_str, exp):
    # see gh-12060
    #
    # Test only the ISO parser - flexibility to
    # different separators and leading zeros.
    actual = tslib._test_parse_iso8601(date_str)
    assert actual == exp


@pytest.mark.parametrize(
    "date_str",
    [
        "2011-01/02",
        "2011=11=11",
        "201401",
        "201111",
        "200101",
        # Mixed separated and unseparated.
        "2005-0101",
        "200501-01",
        "20010101 12:3456",
        "20010101 1234:56",
        # HHMMSS must have two digits in
        # each component if unseparated.
        "20010101 1",
        "20010101 123",
        "20010101 12345",
        "20010101 12345Z",
    ],
)
def test_parsers_iso8601_invalid(date_str):
    msg = 'Error parsing datetime string "{s}"'.format(s=date_str)

    with pytest.raises(ValueError, match=msg):
        tslib._test_parse_iso8601(date_str)


def test_parsers_iso8601_invalid_offset_invalid():
    date_str = "2001-01-01 12-34-56"
    msg = "Timezone hours offset out of range " 'in datetime string "{s}"'.format(
        s=date_str
    )

    with pytest.raises(ValueError, match=msg):
        tslib._test_parse_iso8601(date_str)


def test_parsers_iso8601_leading_space():
    # GH#25895 make sure isoparser doesn't overflow with long input
    date_str, expected = ("2013-1-1 5:30:00", datetime(2013, 1, 1, 5, 30))
    actual = tslib._test_parse_iso8601(" " * 200 + date_str)
    assert actual == expected
@@ -0,0 +1,211 @@
"""
Tests for Timestamp parsing, aimed at pandas/_libs/tslibs/parsing.pyx
"""
from datetime import datetime

from dateutil.parser import parse
import numpy as np
import pytest

from pandas._libs.tslibs import parsing
from pandas._libs.tslibs.parsing import parse_time_string
import pandas.util._test_decorators as td

from pandas.util import testing as tm


def test_parse_time_string():
    (date, parsed, reso) = parse_time_string("4Q1984")
    (date_lower, parsed_lower, reso_lower) = parse_time_string("4q1984")

    assert date == date_lower
    assert reso == reso_lower
    assert parsed == parsed_lower


@pytest.mark.parametrize(
    "dashed,normal", [("1988-Q2", "1988Q2"), ("2Q-1988", "2Q1988")]
)
def test_parse_time_quarter_with_dash(dashed, normal):
    # see gh-9688
    (date_dash, parsed_dash, reso_dash) = parse_time_string(dashed)
    (date, parsed, reso) = parse_time_string(normal)

    assert date_dash == date
    assert parsed_dash == parsed
    assert reso_dash == reso


@pytest.mark.parametrize("dashed", ["-2Q1992", "2-Q1992", "4-4Q1992"])
def test_parse_time_quarter_with_dash_error(dashed):
    msg = "Unknown datetime string format, unable to parse: {dashed}"

    with pytest.raises(parsing.DateParseError, match=msg.format(dashed=dashed)):
        parse_time_string(dashed)


@pytest.mark.parametrize(
    "date_string,expected",
    [
        ("123.1234", False),
        ("-50000", False),
        ("999", False),
        ("m", False),
        ("T", False),
        ("Mon Sep 16, 2013", True),
        ("2012-01-01", True),
        ("01/01/2012", True),
        ("01012012", True),
        ("0101", True),
        ("1-1", True),
    ],
)
def test_does_not_convert_mixed_integer(date_string, expected):
    assert parsing._does_string_look_like_datetime(date_string) is expected


@pytest.mark.parametrize(
    "date_str,kwargs,msg",
    [
        (
            "2013Q5",
            dict(),
            (
                "Incorrect quarterly string is given, "
                "quarter must be between 1 and 4: 2013Q5"
            ),
        ),
        # see gh-5418
        (
            "2013Q1",
            dict(freq="INVLD-L-DEC-SAT"),
            (
                "Unable to retrieve month information "
                "from given freq: INVLD-L-DEC-SAT"
            ),
        ),
    ],
)
def test_parsers_quarterly_with_freq_error(date_str, kwargs, msg):
    with pytest.raises(parsing.DateParseError, match=msg):
        parsing.parse_time_string(date_str, **kwargs)


@pytest.mark.parametrize(
    "date_str,freq,expected",
    [
        ("2013Q2", None, datetime(2013, 4, 1)),
        ("2013Q2", "A-APR", datetime(2012, 8, 1)),
        ("2013-Q2", "A-DEC", datetime(2013, 4, 1)),
    ],
)
def test_parsers_quarterly_with_freq(date_str, freq, expected):
    result, _, _ = parsing.parse_time_string(date_str, freq=freq)
    assert result == expected


@pytest.mark.parametrize(
    "date_str", ["2Q 2005", "2Q-200A", "2Q-200", "22Q2005", "2Q200.", "6Q-20"]
)
def test_parsers_quarter_invalid(date_str):
    if date_str == "6Q-20":
        msg = (
            "Incorrect quarterly string is given, quarter "
            "must be between 1 and 4: {date_str}"
        )
    else:
        msg = "Unknown datetime string format, unable to parse: {date_str}"

    with pytest.raises(ValueError, match=msg.format(date_str=date_str)):
        parsing.parse_time_string(date_str)


@pytest.mark.parametrize(
    "date_str,expected",
    [("201101", datetime(2011, 1, 1, 0, 0)), ("200005", datetime(2000, 5, 1, 0, 0))],
)
def test_parsers_month_freq(date_str, expected):
    result, _, _ = parsing.parse_time_string(date_str, freq="M")
    assert result == expected


@td.skip_if_not_us_locale
@pytest.mark.parametrize(
    "string,fmt",
    [
        ("20111230", "%Y%m%d"),
        ("2011-12-30", "%Y-%m-%d"),
        ("30-12-2011", "%d-%m-%Y"),
        ("2011-12-30 00:00:00", "%Y-%m-%d %H:%M:%S"),
        ("2011-12-30T00:00:00", "%Y-%m-%dT%H:%M:%S"),
        ("2011-12-30 00:00:00.000000", "%Y-%m-%d %H:%M:%S.%f"),
    ],
)
def test_guess_datetime_format_with_parseable_formats(string, fmt):
    result = parsing._guess_datetime_format(string)
    assert result == fmt


@pytest.mark.parametrize("dayfirst,expected", [(True, "%d/%m/%Y"), (False, "%m/%d/%Y")])
def test_guess_datetime_format_with_dayfirst(dayfirst, expected):
    ambiguous_string = "01/01/2011"
    result = parsing._guess_datetime_format(ambiguous_string, dayfirst=dayfirst)
    assert result == expected


@td.skip_if_has_locale
@pytest.mark.parametrize(
    "string,fmt",
    [
        ("30/Dec/2011", "%d/%b/%Y"),
        ("30/December/2011", "%d/%B/%Y"),
        ("30/Dec/2011 00:00:00", "%d/%b/%Y %H:%M:%S"),
    ],
)
def test_guess_datetime_format_with_locale_specific_formats(string, fmt):
    result = parsing._guess_datetime_format(string)
    assert result == fmt


@pytest.mark.parametrize(
    "invalid_dt",
    [
        "2013",
        "01/2013",
        "12:00:00",
        "1/1/1/1",
        "this_is_not_a_datetime",
        "51a",
        9,
        datetime(2011, 1, 1),
    ],
)
def test_guess_datetime_format_invalid_inputs(invalid_dt):
    # A datetime string must include a year, month and a day for it to be
    # guessable, in addition to being a string that looks like a datetime.
    assert parsing._guess_datetime_format(invalid_dt) is None


@pytest.mark.parametrize(
    "string,fmt",
    [
        ("2011-1-1", "%Y-%m-%d"),
        ("1/1/2011", "%m/%d/%Y"),
        ("30-1-2011", "%d-%m-%Y"),
        ("2011-1-1 0:0:0", "%Y-%m-%d %H:%M:%S"),
        ("2011-1-3T00:00:0", "%Y-%m-%dT%H:%M:%S"),
        ("2011-1-1 00:00:00", "%Y-%m-%d %H:%M:%S"),
    ],
)
def test_guess_datetime_format_no_padding(string, fmt):
    # see gh-11142
    result = parsing._guess_datetime_format(string)
    assert result == fmt


def test_try_parse_dates():
    arr = np.array(["5/1/2000", "6/1/2000", "7/1/2000"], dtype=object)
    result = parsing.try_parse_dates(arr, dayfirst=True)

    expected = np.array([parse(d, dayfirst=True) for d in arr])
    tm.assert_numpy_array_equal(result, expected)
@@ -0,0 +1,78 @@
import pytest

from pandas._libs.tslibs.frequencies import get_freq
from pandas._libs.tslibs.period import period_asfreq, period_ordinal


@pytest.mark.parametrize(
    "freq1,freq2,expected",
    [
        ("D", "H", 24),
        ("D", "T", 1440),
        ("D", "S", 86400),
        ("D", "L", 86400000),
        ("D", "U", 86400000000),
        ("D", "N", 86400000000000),
        ("H", "T", 60),
        ("H", "S", 3600),
        ("H", "L", 3600000),
        ("H", "U", 3600000000),
        ("H", "N", 3600000000000),
        ("T", "S", 60),
        ("T", "L", 60000),
        ("T", "U", 60000000),
        ("T", "N", 60000000000),
        ("S", "L", 1000),
        ("S", "U", 1000000),
        ("S", "N", 1000000000),
        ("L", "U", 1000),
        ("L", "N", 1000000),
        ("U", "N", 1000),
    ],
)
def test_intra_day_conversion_factors(freq1, freq2, expected):
    assert period_asfreq(1, get_freq(freq1), get_freq(freq2), False) == expected


@pytest.mark.parametrize(
    "freq,expected", [("A", 0), ("M", 0), ("W", 1), ("D", 0), ("B", 0)]
)
def test_period_ordinal_start_values(freq, expected):
    # information for Jan. 1, 1970.
    assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq(freq)) == expected


@pytest.mark.parametrize(
    "dt,expected",
    [
        ((1970, 1, 4, 0, 0, 0, 0, 0), 1),
        ((1970, 1, 5, 0, 0, 0, 0, 0), 2),
        ((2013, 10, 6, 0, 0, 0, 0, 0), 2284),
        ((2013, 10, 7, 0, 0, 0, 0, 0), 2285),
    ],
)
def test_period_ordinal_week(dt, expected):
    args = dt + (get_freq("W"),)
    assert period_ordinal(*args) == expected


@pytest.mark.parametrize(
    "day,expected",
    [
        # Thursday (Oct. 3, 2013).
        (3, 11415),
        # Friday (Oct. 4, 2013).
        (4, 11416),
        # Saturday (Oct. 5, 2013).
        (5, 11417),
        # Sunday (Oct. 6, 2013).
        (6, 11417),
        # Monday (Oct. 7, 2013).
        (7, 11417),
        # Tuesday (Oct. 8, 2013).
        (8, 11418),
    ],
)
def test_period_ordinal_business_day(day, expected):
    args = (2013, 10, day, 0, 0, 0, 0, 0, get_freq("B"))
    assert period_ordinal(*args) == expected
@@ -0,0 +1,31 @@
import numpy as np
import pytest

from pandas._libs.tslibs.timedeltas import delta_to_nanoseconds

import pandas as pd
from pandas import Timedelta


@pytest.mark.parametrize(
    "obj,expected",
    [
        (np.timedelta64(14, "D"), 14 * 24 * 3600 * 1e9),
        (Timedelta(minutes=-7), -7 * 60 * 1e9),
        (Timedelta(minutes=-7).to_pytimedelta(), -7 * 60 * 1e9),
        (pd.offsets.Nano(125), 125),
        (1, 1),
        (np.int64(2), 2),
        (np.int32(3), 3),
    ],
)
def test_delta_to_nanoseconds(obj, expected):
    result = delta_to_nanoseconds(obj)
    assert result == expected


def test_delta_to_nanoseconds_error():
    obj = np.array([123456789], dtype="m8[ns]")

    with pytest.raises(TypeError, match="<class 'numpy.ndarray'>"):
        delta_to_nanoseconds(obj)
@@ -0,0 +1,108 @@
from datetime import datetime

import dateutil.tz
import pytest
import pytz

from pandas._libs.tslibs import conversion, timezones

from pandas import Timestamp


@pytest.mark.parametrize("tz_name", list(pytz.common_timezones))
def test_cache_keys_are_distinct_for_pytz_vs_dateutil(tz_name):
    if tz_name == "UTC":
        pytest.skip("UTC: special case in dateutil")

    tz_p = timezones.maybe_get_tz(tz_name)
    tz_d = timezones.maybe_get_tz("dateutil/" + tz_name)

    if tz_d is None:
        pytest.skip(tz_name + ": dateutil does not know about this one")

    assert timezones._p_tz_cache_key(tz_p) != timezones._p_tz_cache_key(tz_d)


def test_tzlocal_repr():
    # see gh-13583
    ts = Timestamp("2011-01-01", tz=dateutil.tz.tzlocal())
    assert ts.tz == dateutil.tz.tzlocal()
    assert "tz='tzlocal()')" in repr(ts)


def test_tzlocal_maybe_get_tz():
    # see gh-13583
    tz = timezones.maybe_get_tz("tzlocal()")
    assert tz == dateutil.tz.tzlocal()


def test_tzlocal_offset():
    # see gh-13583
    #
    # Get offset using normal datetime for test.
    ts = Timestamp("2011-01-01", tz=dateutil.tz.tzlocal())

    offset = dateutil.tz.tzlocal().utcoffset(datetime(2011, 1, 1))
    offset = offset.total_seconds() * 1000000000

    assert ts.value + offset == Timestamp("2011-01-01").value


@pytest.fixture(
    params=[
        (pytz.timezone("US/Eastern"), lambda tz, x: tz.localize(x)),
        (dateutil.tz.gettz("US/Eastern"), lambda tz, x: x.replace(tzinfo=tz)),
    ]
)
def infer_setup(request):
    eastern, localize = request.param

    start_naive = datetime(2001, 1, 1)
    end_naive = datetime(2009, 1, 1)

    start = localize(eastern, start_naive)
    end = localize(eastern, end_naive)

    return eastern, localize, start, end, start_naive, end_naive


def test_infer_tz_compat(infer_setup):
    eastern, _, start, end, start_naive, end_naive = infer_setup

    assert (
        timezones.infer_tzinfo(start, end)
        is conversion.localize_pydatetime(start_naive, eastern).tzinfo
    )
    assert (
        timezones.infer_tzinfo(start, None)
        is conversion.localize_pydatetime(start_naive, eastern).tzinfo
    )
    assert (
        timezones.infer_tzinfo(None, end)
        is conversion.localize_pydatetime(end_naive, eastern).tzinfo
    )


def test_infer_tz_utc_localize(infer_setup):
    _, _, start, end, start_naive, end_naive = infer_setup
    utc = pytz.utc

    start = utc.localize(start_naive)
    end = utc.localize(end_naive)

    assert timezones.infer_tzinfo(start, end) is utc


@pytest.mark.parametrize("ordered", [True, False])
def test_infer_tz_mismatch(infer_setup, ordered):
    eastern, _, _, _, start_naive, end_naive = infer_setup
    msg = "Inputs must both have the same timezone"

    utc = pytz.utc
    start = utc.localize(start_naive)
    end = conversion.localize_pydatetime(end_naive, eastern)

    args = (start, end) if ordered else (end, start)

    with pytest.raises(AssertionError, match=msg):
        timezones.infer_tzinfo(*args)