1 change: 1 addition & 0 deletions doc/source/whatsnew/v3.0.0.rst
@@ -745,6 +745,7 @@ Other API changes
the dtype of the resulting Index (:issue:`60797`)
- :class:`IncompatibleFrequency` now subclasses ``TypeError`` instead of ``ValueError``. As a result, joins with mismatched frequencies now cast to object like other non-comparable joins, and arithmetic with indexes with mismatched frequencies align (:issue:`55782`)
- :class:`Series` "flex" methods like :meth:`Series.add` no longer allow passing a :class:`DataFrame` for ``other``; use the DataFrame reversed method instead (:issue:`46179`)
- :func:`date_range` and :func:`timedelta_range` no longer default to ``unit="ns"``; instead, the unit is inferred from the ``start``, ``end``, and ``freq`` arguments. Pass ``unit`` explicitly to override the inferred resolution (:issue:`59031`); see the sketch after this hunk
- :meth:`CategoricalIndex.append` no longer attempts to cast different-dtype indexes to the caller's dtype (:issue:`41626`)
- :meth:`ExtensionDtype.construct_array_type` is now a regular method instead of a ``classmethod`` (:issue:`58663`)
- Comparison operations between :class:`Index` and :class:`Series` now consistently return :class:`Series` regardless of which object is on the left or right (:issue:`36759`)
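A minimal sketch of the unit inference described in the whatsnew entry above. The exact inferred resolution depends on the precision of ``start``, ``end``, and ``freq``, so the resolutions named in the comments are assumptions rather than guaranteed output.

import pandas as pd

# The unit is now inferred from the inputs instead of defaulting to "ns";
# plain date strings like these are expected to yield a coarser resolution.
idx = pd.date_range(start="2018-01-01", end="2018-01-10", freq="D")
print(idx.dtype)

# Passing unit explicitly overrides the inference and restores nanoseconds.
idx_ns = pd.date_range(start="2018-01-01", end="2018-01-10", freq="D", unit="ns")
print(idx_ns.dtype)  # datetime64[ns]

# timedelta_range follows the same rule and also accepts an explicit unit.
tdi = pd.timedelta_range(start="1 day", periods=3, unit="s")
print(tdi.dtype)  # timedelta64[s]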
2 changes: 1 addition & 1 deletion pandas/_libs/tslibs/np_datetime.pyx
@@ -208,7 +208,7 @@ class OutOfBoundsTimedelta(ValueError):
Examples
--------
>>> pd.date_range(start="1/1/1700", freq="B", periods=100000)
>>> pd.date_range(start="1/1/1700", freq="B", periods=100000, unit="ns")
Traceback (most recent call last):
OutOfBoundsTimedelta: Cannot cast 139999 days 00:00:00
to unit='ns' without overflow.
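A minimal sketch of why the doctest above now passes ``unit="ns"`` explicitly: with unit inference the same range is expected to fit in a coarser resolution, while forcing nanoseconds still overflows. The inferred resolution here is an assumption, not guaranteed output.

import pandas as pd

# With the inferred (coarser) unit, a business-day range starting in 1700
# is expected to fit without overflowing.
idx = pd.date_range(start="1/1/1700", freq="B", periods=100_000)
print(idx.dtype)

# Forcing nanoseconds reproduces the OutOfBoundsTimedelta from the doctest.
try:
    pd.date_range(start="1/1/1700", freq="B", periods=100_000, unit="ns")
except pd.errors.OutOfBoundsTimedelta as exc:
    print(type(exc).__name__, exc)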
18 changes: 9 additions & 9 deletions pandas/core/arrays/datetimelike.py
@@ -1649,7 +1649,7 @@ def mean(self, *, skipna: bool = True, axis: AxisInt | None = 0):
>>> idx = pd.date_range("2001-01-01 00:00", periods=3)
>>> idx
DatetimeIndex(['2001-01-01', '2001-01-02', '2001-01-03'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')
>>> idx.mean()
Timestamp('2001-01-02 00:00:00')

@@ -1900,21 +1900,21 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]:
>>> rng
DatetimeIndex(['2018-01-01 11:59:00', '2018-01-01 12:00:00',
'2018-01-01 12:01:00'],
dtype='datetime64[ns]', freq='min')
dtype='datetime64[us]', freq='min')
"""

_round_example = """>>> rng.round('h')
DatetimeIndex(['2018-01-01 12:00:00', '2018-01-01 12:00:00',
'2018-01-01 12:00:00'],
dtype='datetime64[ns]', freq=None)
dtype='datetime64[us]', freq=None)

**Series**

>>> pd.Series(rng).dt.round("h")
0 2018-01-01 12:00:00
1 2018-01-01 12:00:00
2 2018-01-01 12:00:00
dtype: datetime64[ns]
dtype: datetime64[us]

When rounding near a daylight saving time transition, use ``ambiguous`` or
``nonexistent`` to control how the timestamp should be re-localized.
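A minimal sketch of the ``ambiguous`` usage the sentence above refers to, for the case where the rounded timestamp lands on a wall-clock hour that occurs twice at the end of DST; the timezone and timestamps are illustrative assumptions.

import pandas as pd

# 02:00-02:59 local occurs twice in Europe/Amsterdam when DST ends, so
# flooring 03:30 to a 2-hour boundary needs ambiguous to pick the offset.
rng_tz = pd.DatetimeIndex(["2021-10-31 03:30:00"]).tz_localize("Europe/Amsterdam")
print(rng_tz.floor("2h", ambiguous=False))  # resolve to the standard-time offset (+01:00)
print(rng_tz.floor("2h", ambiguous=True))   # resolve to the DST offset (+02:00)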
@@ -1933,15 +1933,15 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]:
_floor_example = """>>> rng.floor('h')
DatetimeIndex(['2018-01-01 11:00:00', '2018-01-01 12:00:00',
'2018-01-01 12:00:00'],
dtype='datetime64[ns]', freq=None)
dtype='datetime64[us]', freq=None)

**Series**

>>> pd.Series(rng).dt.floor("h")
0 2018-01-01 11:00:00
1 2018-01-01 12:00:00
2 2018-01-01 12:00:00
dtype: datetime64[ns]
dtype: datetime64[us]

When rounding near a daylight saving time transition, use ``ambiguous`` or
``nonexistent`` to control how the timestamp should be re-localized.
@@ -1960,15 +1960,15 @@ def strftime(self, date_format: str) -> npt.NDArray[np.object_]:
_ceil_example = """>>> rng.ceil('h')
DatetimeIndex(['2018-01-01 12:00:00', '2018-01-01 12:00:00',
'2018-01-01 13:00:00'],
dtype='datetime64[ns]', freq=None)
dtype='datetime64[us]', freq=None)

**Series**

>>> pd.Series(rng).dt.ceil("h")
0 2018-01-01 12:00:00
1 2018-01-01 12:00:00
2 2018-01-01 13:00:00
dtype: datetime64[ns]
dtype: datetime64[us]

When rounding near a daylight saving time transition, use ``ambiguous`` or
``nonexistent`` to control how the timestamp should be re-localized.
@@ -2018,7 +2018,7 @@ def freq(self):
'2022-02-22 06:22:22-06:00', '2022-02-22 07:22:22-06:00',
'2022-02-22 08:22:22-06:00', '2022-02-22 09:22:22-06:00',
'2022-02-22 10:22:22-06:00', '2022-02-22 11:22:22-06:00'],
dtype='datetime64[ns, America/Chicago]', freq='h')
dtype='datetime64[us, America/Chicago]', freq='h')
>>> datetimeindex.freq
<Hour>
"""
64 changes: 32 additions & 32 deletions pandas/core/arrays/datetimes.py
@@ -913,13 +913,13 @@ def tz_convert(self, tz) -> Self:
DatetimeIndex(['2014-08-01 09:00:00+02:00',
'2014-08-01 10:00:00+02:00',
'2014-08-01 11:00:00+02:00'],
dtype='datetime64[ns, Europe/Berlin]', freq='h')
dtype='datetime64[us, Europe/Berlin]', freq='h')

>>> dti.tz_convert(None)
DatetimeIndex(['2014-08-01 07:00:00',
'2014-08-01 08:00:00',
'2014-08-01 09:00:00'],
dtype='datetime64[ns]', freq='h')
dtype='datetime64[us]', freq='h')
""" # noqa: E501
tz = timezones.maybe_get_tz(tz)

@@ -1010,7 +1010,7 @@ def tz_localize(
>>> tz_naive
DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
'2018-03-03 09:00:00'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')

Localize DatetimeIndex in US/Eastern time zone:

@@ -1019,15 +1019,15 @@ def tz_localize(
DatetimeIndex(['2018-03-01 09:00:00-05:00',
'2018-03-02 09:00:00-05:00',
'2018-03-03 09:00:00-05:00'],
dtype='datetime64[ns, US/Eastern]', freq=None)
dtype='datetime64[us, US/Eastern]', freq=None)

With ``tz=None``, we can remove the time zone information
while keeping the local time (not converted to UTC):

>>> tz_aware.tz_localize(None)
DatetimeIndex(['2018-03-01 09:00:00', '2018-03-02 09:00:00',
'2018-03-03 09:00:00'],
dtype='datetime64[ns]', freq=None)
dtype='datetime64[us]', freq=None)

Be careful with DST changes. When there is sequential data, pandas can
infer the DST time:
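A minimal sketch of the kind of sequential data meant here: the repeated wall-clock times around the end of DST are ordered, so ``ambiguous="infer"`` can work out which occurrence each row belongs to. The timezone and timestamps are illustrative assumptions.

import pandas as pd

# Local times 02:00-02:59 occur twice when DST ends; because the data is
# ordered, pandas can infer the correct offset for each repeated time.
s = pd.to_datetime(
    pd.Series(
        [
            "2018-10-28 01:30:00",
            "2018-10-28 02:00:00",
            "2018-10-28 02:30:00",
            "2018-10-28 02:00:00",
            "2018-10-28 02:30:00",
            "2018-10-28 03:00:00",
            "2018-10-28 03:30:00",
        ]
    )
)
print(s.dt.tz_localize("CET", ambiguous="infer"))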
@@ -1180,12 +1180,12 @@ def normalize(self) -> Self:
DatetimeIndex(['2014-08-01 10:00:00+05:30',
'2014-08-01 11:00:00+05:30',
'2014-08-01 12:00:00+05:30'],
dtype='datetime64[ns, Asia/Calcutta]', freq='h')
dtype='datetime64[us, Asia/Calcutta]', freq='h')
>>> idx.normalize()
DatetimeIndex(['2014-08-01 00:00:00+05:30',
'2014-08-01 00:00:00+05:30',
'2014-08-01 00:00:00+05:30'],
dtype='datetime64[ns, Asia/Calcutta]', freq=None)
dtype='datetime64[us, Asia/Calcutta]', freq=None)
"""
new_values = normalize_i8_timestamps(self.asi8, self.tz, reso=self._creso)
dt64_values = new_values.view(self._ndarray.dtype)
@@ -1309,7 +1309,7 @@ def month_name(self, locale=None) -> npt.NDArray[np.object_]:
0 2018-01-31
1 2018-02-28
2 2018-03-31
dtype: datetime64[ns]
dtype: datetime64[us]
>>> s.dt.month_name()
0 January
1 February
@@ -1319,7 +1319,7 @@ def month_name(self, locale=None) -> npt.NDArray[np.object_]:
>>> idx = pd.date_range(start="2018-01", freq="ME", periods=3)
>>> idx
DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'],
dtype='datetime64[ns]', freq='ME')
dtype='datetime64[us]', freq='ME')
>>> idx.month_name()
Index(['January', 'February', 'March'], dtype='str')

@@ -1330,7 +1330,7 @@ def month_name(self, locale=None) -> npt.NDArray[np.object_]:
>>> idx = pd.date_range(start="2018-01", freq="ME", periods=3)
>>> idx
DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31'],
dtype='datetime64[ns]', freq='ME')
dtype='datetime64[us]', freq='ME')
>>> idx.month_name(locale="pt_BR.utf8") # doctest: +SKIP
Index(['Janeiro', 'Fevereiro', 'Março'], dtype='str')
"""
@@ -1377,7 +1377,7 @@ def day_name(self, locale=None) -> npt.NDArray[np.object_]:
0 2018-01-01
1 2018-01-02
2 2018-01-03
dtype: datetime64[ns]
dtype: datetime64[us]
>>> s.dt.day_name()
0 Monday
1 Tuesday
@@ -1387,7 +1387,7 @@ def day_name(self, locale=None) -> npt.NDArray[np.object_]:
>>> idx = pd.date_range(start="2018-01-01", freq="D", periods=3)
>>> idx
DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')
>>> idx.day_name()
Index(['Monday', 'Tuesday', 'Wednesday'], dtype='str')

@@ -1398,7 +1398,7 @@ def day_name(self, locale=None) -> npt.NDArray[np.object_]:
>>> idx = pd.date_range(start="2018-01-01", freq="D", periods=3)
>>> idx
DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')
>>> idx.day_name(locale="pt_BR.utf8") # doctest: +SKIP
Index(['Segunda', 'Terça', 'Quarta'], dtype='str')
"""
@@ -1611,7 +1611,7 @@ def isocalendar(self) -> DataFrame:
0 2000-12-31
1 2001-12-31
2 2002-12-31
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.year
0 2000
1 2001
@@ -1639,7 +1639,7 @@ def isocalendar(self) -> DataFrame:
0 2000-01-31
1 2000-02-29
2 2000-03-31
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.month
0 1
1 2
@@ -1668,7 +1668,7 @@ def isocalendar(self) -> DataFrame:
0 2000-01-01
1 2000-01-02
2 2000-01-03
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.day
0 1
1 2
@@ -1697,7 +1697,7 @@ def isocalendar(self) -> DataFrame:
0 2000-01-01 00:00:00
1 2000-01-01 01:00:00
2 2000-01-01 02:00:00
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.hour
0 0
1 1
@@ -1725,7 +1725,7 @@ def isocalendar(self) -> DataFrame:
0 2000-01-01 00:00:00
1 2000-01-01 00:01:00
2 2000-01-01 00:02:00
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.minute
0 0
1 1
@@ -1754,7 +1754,7 @@ def isocalendar(self) -> DataFrame:
0 2000-01-01 00:00:00
1 2000-01-01 00:00:01
2 2000-01-01 00:00:02
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.second
0 0
1 1
@@ -1782,7 +1782,7 @@ def isocalendar(self) -> DataFrame:
0 2000-01-01 00:00:00.000000
1 2000-01-01 00:00:00.000001
2 2000-01-01 00:00:00.000002
dtype: datetime64[ns]
dtype: datetime64[us]
>>> datetime_series.dt.microsecond
0 0
1 1
@@ -1982,7 +1982,7 @@ def isocalendar(self) -> DataFrame:
0 2018-02-27
1 2018-02-28
2 2018-03-01
dtype: datetime64[ns]
dtype: datetime64[us]
>>> s.dt.is_month_start
0 False
1 False
@@ -2044,7 +2044,7 @@ def isocalendar(self) -> DataFrame:
>>> idx = pd.date_range('2017-03-30', periods=4)
>>> idx
DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')

>>> idx.is_quarter_start
array([False, False, True, False])
@@ -2086,7 +2086,7 @@ def isocalendar(self) -> DataFrame:
>>> idx = pd.date_range('2017-03-30', periods=4)
>>> idx
DatetimeIndex(['2017-03-30', '2017-03-31', '2017-04-01', '2017-04-02'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')

>>> idx.is_quarter_end
array([False, True, False, False])
@@ -2119,7 +2119,7 @@ def isocalendar(self) -> DataFrame:
0 2017-12-30
1 2017-12-31
2 2018-01-01
dtype: datetime64[ns]
dtype: datetime64[us]

>>> dates.dt.is_year_start
0 False
@@ -2130,7 +2130,7 @@ def isocalendar(self) -> DataFrame:
>>> idx = pd.date_range("2017-12-30", periods=3)
>>> idx
DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')

>>> idx.is_year_start
array([False, False, True])
@@ -2144,7 +2144,7 @@ def isocalendar(self) -> DataFrame:
1 2022-01-03
2 2023-01-02
3 2024-01-01
dtype: datetime64[ns]
dtype: datetime64[us]

>>> dates.dt.is_year_start
0 True
@@ -2156,7 +2156,7 @@ def isocalendar(self) -> DataFrame:
>>> idx = pd.date_range("2020-10-30", periods=4, freq="BYS")
>>> idx
DatetimeIndex(['2021-01-01', '2022-01-03', '2023-01-02', '2024-01-01'],
dtype='datetime64[ns]', freq='BYS-JAN')
dtype='datetime64[us]', freq='BYS-JAN')

>>> idx.is_year_start
array([ True, True, True, True])
@@ -2189,7 +2189,7 @@ def isocalendar(self) -> DataFrame:
0 2017-12-30
1 2017-12-31
2 2018-01-01
dtype: datetime64[ns]
dtype: datetime64[us]

>>> dates.dt.is_year_end
0 False
@@ -2200,7 +2200,7 @@ def isocalendar(self) -> DataFrame:
>>> idx = pd.date_range("2017-12-30", periods=3)
>>> idx
DatetimeIndex(['2017-12-30', '2017-12-31', '2018-01-01'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')

>>> idx.is_year_end
array([False, True, False])
@@ -2237,7 +2237,7 @@ def isocalendar(self) -> DataFrame:
>>> idx = pd.date_range("2012-01-01", "2015-01-01", freq="YE")
>>> idx
DatetimeIndex(['2012-12-31', '2013-12-31', '2014-12-31'],
dtype='datetime64[ns]', freq='YE-DEC')
dtype='datetime64[us]', freq='YE-DEC')
>>> idx.is_leap_year
array([ True, False, False])

@@ -2246,7 +2246,7 @@ def isocalendar(self) -> DataFrame:
0 2012-12-31
1 2013-12-31
2 2014-12-31
dtype: datetime64[ns]
dtype: datetime64[us]
>>> dates_series.dt.is_leap_year
0 True
1 False
@@ -2380,7 +2380,7 @@ def std(
>>> idx = pd.date_range("2001-01-01 00:00", periods=3)
>>> idx
DatetimeIndex(['2001-01-01', '2001-01-02', '2001-01-03'],
dtype='datetime64[ns]', freq='D')
dtype='datetime64[us]', freq='D')
>>> idx.std()
Timedelta('1 days 00:00:00')
"""
2 changes: 1 addition & 1 deletion pandas/core/generic.py
@@ -6446,7 +6446,7 @@ def astype(
0 2020-01-01
1 2020-01-02
2 2020-01-03
dtype: datetime64[ns]
dtype: datetime64[us]
"""
self._check_copy_deprecation(copy)
if is_dict_like(dtype):