diff --git a/README.md b/README.md
index 66b281bc4..6fb46d538 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ mypy round.py
 we get the following error message:
 
 ```text
-round.py:6: error: Argument "decimals" to "round" of "DataFrame" has incompatible type "DataFrame"; expected "Union[int, Dict[Any, Any], Series[Any]]"  [arg-type]
+round.py:6: error: Argument "decimals" to "round" of "DataFrame" has incompatible type "DataFrame"; expected "Union[int, Dict[Any, Any], Series]"  [arg-type]
 Found 1 error in 1 file (checked 1 source file)
 ```
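For context, a minimal `round.py` along the following lines reproduces this diagnostic (the actual file is not shown in this diff, so the column names and values below are assumptions); mypy rejects the call because `DataFrame.round` only accepts an `int`, a `dict`, or a `Series` for `decimals`:

```python
import pandas as pd

df = pd.DataFrame({"col1": [1.2345, 2.3456], "col2": [3.4567, 4.5678]})
decimals = pd.DataFrame({"col1": [1], "col2": [2]})

# Passing a DataFrame for `decimals` is flagged by the stubs with the
# "incompatible type" error quoted above.
df.round(decimals)
```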
 
diff --git a/docs/philosophy.md b/docs/philosophy.md
index bc9106317..853c516e1 100644
--- a/docs/philosophy.md
+++ b/docs/philosophy.md
@@ -29,8 +29,8 @@ lt = s < 3
 
 In the pandas source, `lt` is a `Series` with a `dtype` of `bool`.  In the pandas-stubs,
 the type of `lt` is `Series[bool]`.  This allows further type checking to occur in other
-pandas methods.  Note that in the above example, `s` is typed as `Series[Any]` because
-its type cannot be statically inferred.
+pandas methods.  Note that in the above example, `s` is typed as a bare `Series` (which
+defaults to `Series[Any]`) because its type cannot be statically inferred.
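As a standalone sketch of the behaviour described above (the `DataFrame` column access is assumed here purely so that `s` has no statically inferable element type):

```python
import pandas as pd
from typing_extensions import assert_type

df = pd.DataFrame({"x": [1, 2, 3]})
s = df["x"]   # statically a bare Series, i.e. Series[Any]
lt = s < 3    # the stubs type the comparison result as Series[bool]

assert_type(lt, "pd.Series[bool]")
```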
 
 This also allows type checking for operations on series that contain date/time data.  Consider
 the following example that creates two series of datetimes with corresponding arithmetic.
diff --git a/pandas-stubs/_libs/tslibs/timestamps.pyi b/pandas-stubs/_libs/tslibs/timestamps.pyi
index 99de96630..795f98a9a 100644
--- a/pandas-stubs/_libs/tslibs/timestamps.pyi
+++ b/pandas-stubs/_libs/tslibs/timestamps.pyi
@@ -19,9 +19,9 @@ from typing import (
 import numpy as np
 from pandas import (
     DatetimeIndex,
+    Index,
     TimedeltaIndex,
 )
-from pandas.core.indexes.base import UnknownIndex
 from pandas.core.series import (
     Series,
     TimedeltaSeries,
@@ -236,7 +236,7 @@ class Timestamp(datetime, SupportsIndex):
     @overload
     def __eq__(self, other: TimestampSeries) -> Series[bool]: ...  # type: ignore[overload-overlap]
     @overload
-    def __eq__(self, other: npt.NDArray[np.datetime64] | UnknownIndex) -> np_ndarray_bool: ...  # type: ignore[overload-overlap]
+    def __eq__(self, other: npt.NDArray[np.datetime64] | Index) -> np_ndarray_bool: ...  # type: ignore[overload-overlap]
     @overload
     def __eq__(self, other: object) -> Literal[False]: ...
     @overload
@@ -244,7 +244,7 @@ class Timestamp(datetime, SupportsIndex):
     @overload
     def __ne__(self, other: TimestampSeries) -> Series[bool]: ...  # type: ignore[overload-overlap]
     @overload
-    def __ne__(self, other: npt.NDArray[np.datetime64] | UnknownIndex) -> np_ndarray_bool: ...  # type: ignore[overload-overlap]
+    def __ne__(self, other: npt.NDArray[np.datetime64] | Index) -> np_ndarray_bool: ...  # type: ignore[overload-overlap]
     @overload
     def __ne__(self, other: object) -> Literal[True]: ...
     def __hash__(self) -> int: ...
diff --git a/pandas-stubs/_typing.pyi b/pandas-stubs/_typing.pyi
index 7ee8e3f95..20c6849e8 100644
--- a/pandas-stubs/_typing.pyi
+++ b/pandas-stubs/_typing.pyi
@@ -18,7 +18,6 @@ from typing import (
     Protocol,
     SupportsIndex,
     TypedDict,
-    TypeVar,
     Union,
     overload,
 )
@@ -36,6 +35,7 @@ from pandas.core.tools.datetimes import FulldatetimeDict
 from typing_extensions import (
     ParamSpec,
     TypeAlias,
+    TypeVar,
 )
 
 from pandas._libs.interval import Interval
@@ -66,7 +66,7 @@ HashableT5 = TypeVar("HashableT5", bound=Hashable)
 # array-like
 
 ArrayLike: TypeAlias = ExtensionArray | np.ndarray
-AnyArrayLike: TypeAlias = ArrayLike | Index[Any] | Series[Any]
+AnyArrayLike: TypeAlias = ArrayLike | Index | Series
 
 # list-like
 
@@ -803,7 +803,7 @@ DtypeNp = TypeVar("DtypeNp", bound=np.dtype[np.generic])
 KeysArgType: TypeAlias = Any
 ListLikeT = TypeVar("ListLikeT", bound=ListLike)
 ListLikeExceptSeriesAndStr: TypeAlias = (
-    MutableSequence[Any] | np.ndarray | tuple[Any, ...] | Index[Any]
+    MutableSequence[Any] | np.ndarray | tuple[Any, ...] | Index
 )
 ListLikeU: TypeAlias = Sequence | np.ndarray | Series | Index
 ListLikeHashable: TypeAlias = (
@@ -826,29 +826,8 @@ MaskType: TypeAlias = Series[bool] | np_ndarray_bool | list[bool]
 
 # Scratch types for generics
 
-S1 = TypeVar(
-    "S1",
-    bound=str
-    | bytes
-    | datetime.date
-    | datetime.time
-    | bool
-    | int
-    | float
-    | complex
-    | Dtype
-    | datetime.datetime  # includes pd.Timestamp
-    | datetime.timedelta  # includes pd.Timedelta
-    | Period
-    | Interval
-    | CategoricalDtype
-    | BaseOffset
-    | list[str],
-)
-
-S2 = TypeVar(
-    "S2",
-    bound=str
+SeriesDType: TypeAlias = (
+    str
     | bytes
     | datetime.date
     | datetime.time
@@ -863,8 +842,12 @@ S2 = TypeVar(
     | Interval
     | CategoricalDtype
     | BaseOffset
-    | list[str],
+    | list[str]
 )
+S1 = TypeVar("S1", bound=SeriesDType, default=Any)
+# Like S1, but without `default=Any`.
+S2 = TypeVar("S2", bound=SeriesDType)
+S3 = TypeVar("S3", bound=SeriesDType)
 
 IndexingInt: TypeAlias = (
     int | np.int_ | np.integer | np.unsignedinteger | np.signedinteger | np.int8
@@ -951,7 +934,7 @@ ReplaceValue: TypeAlias = (
     | NAType
     | Sequence[Scalar | Pattern]
     | Mapping[HashableT, ScalarT]
-    | Series[Any]
+    | Series
     | None
 )
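The import shuffle above (taking `TypeVar` from `typing_extensions` rather than `typing`) is what makes `default=Any` legal: PEP 696 type-variable defaults only landed in `typing` in Python 3.13, so the backport is needed on the versions the stubs support. A self-contained sketch of the effect, using a hypothetical `Box` class rather than anything from the stubs:

```python
from typing import Any, Generic
from typing_extensions import TypeVar

# typing.TypeVar only gained `default=` in Python 3.13; the
# typing_extensions backport provides it everywhere (PEP 696).
T = TypeVar("T", default=Any)

class Box(Generic[T]):
    def __init__(self, value: T) -> None:
        self.value = value

b: Box = Box(1)        # a bare Box annotation now means Box[Any]
c: Box[int] = Box(2)   # explicit parameters still work as before
```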
 
diff --git a/pandas-stubs/core/dtypes/missing.pyi b/pandas-stubs/core/dtypes/missing.pyi
index 11319304d..e36496cfd 100644
--- a/pandas-stubs/core/dtypes/missing.pyi
+++ b/pandas-stubs/core/dtypes/missing.pyi
@@ -26,9 +26,9 @@ isneginf_scalar = ...
 @overload
 def isna(obj: DataFrame) -> DataFrame: ...
 @overload
-def isna(obj: Series[Any]) -> Series[bool]: ...
+def isna(obj: Series) -> Series[bool]: ...
 @overload
-def isna(obj: Index[Any] | list[Any] | ArrayLike) -> npt.NDArray[np.bool_]: ...
+def isna(obj: Index | list[Any] | ArrayLike) -> npt.NDArray[np.bool_]: ...
 @overload
 def isna(
     obj: Scalar | NaTType | NAType | None,
@@ -39,9 +39,9 @@ isnull = isna
 @overload
 def notna(obj: DataFrame) -> DataFrame: ...
 @overload
-def notna(obj: Series[Any]) -> Series[bool]: ...
+def notna(obj: Series) -> Series[bool]: ...
 @overload
-def notna(obj: Index[Any] | list[Any] | ArrayLike) -> npt.NDArray[np.bool_]: ...
+def notna(obj: Index | list[Any] | ArrayLike) -> npt.NDArray[np.bool_]: ...
 @overload
 def notna(obj: ScalarT | NaTType | NAType | None) -> TypeIs[ScalarT]: ...
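A brief usage sketch of how the rewritten overloads resolve (the data below is made up; the commented types follow from the overloads in this file):

```python
import pandas as pd

s = pd.Series([1.0, None, 3.0])
idx = pd.Index(["a", None, "c"])

mask_s = pd.isna(s)    # Series overload -> Series[bool]
mask_i = pd.isna(idx)  # Index/array-like overload -> npt.NDArray[np.bool_]
```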
 
diff --git a/pandas-stubs/core/frame.pyi b/pandas-stubs/core/frame.pyi
index 0c8166f05..2a9875d83 100644
--- a/pandas-stubs/core/frame.pyi
+++ b/pandas-stubs/core/frame.pyi
@@ -56,7 +56,6 @@ from pandas.core.reshape.pivot import (
 )
 from pandas.core.series import (
     Series,
-    UnknownSeries,
 )
 from pandas.core.window import (
     Expanding,
@@ -79,7 +78,7 @@ from pandas._libs.tslibs import BaseOffset
 from pandas._libs.tslibs.nattype import NaTType
 from pandas._libs.tslibs.offsets import DateOffset
 from pandas._typing import (
-    S1,
+    S2,
     AggFuncTypeBase,
     AggFuncTypeDictFrame,
     AggFuncTypeDictSeries,
@@ -1319,11 +1318,11 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     @overload
     def stack(
         self, level: Level | list[Level] = ..., dropna: _bool = ..., sort: _bool = ...
-    ) -> Self | Series[Any]: ...
+    ) -> Self | Series: ...
     @overload
     def stack(
         self, level: Level | list[Level] = ..., future_stack: _bool = ...
-    ) -> Self | Series[Any]: ...
+    ) -> Self | Series: ...
     def explode(
         self, column: Sequence[Hashable], ignore_index: _bool = ...
     ) -> Self: ...
@@ -1383,7 +1382,7 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     @overload
     def apply(
         self,
-        f: Callable[..., ListLikeExceptSeriesAndStr | Series[Any]],
+        f: Callable[..., ListLikeExceptSeriesAndStr | Series],
         axis: AxisIndex = ...,
         raw: _bool = ...,
         result_type: None = ...,
@@ -1393,13 +1392,14 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     @overload
     def apply(
         self,
-        f: Callable[..., S1 | NAType],
+        # Use S2 (TypeVar without `default=Any`) instead of S1 due to https://github.com/python/mypy/issues/19182.
+        f: Callable[..., S2 | NAType],
         axis: AxisIndex = ...,
         raw: _bool = ...,
         result_type: None = ...,
         args: Any = ...,
         **kwargs: Any,
-    ) -> Series[S1]: ...
+    ) -> Series[S2]: ...
     # Since non-scalar type T is not supported in Series[T],
     # we separate this overload from the above one
     @overload
@@ -1411,24 +1411,25 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
         result_type: None = ...,
         args: Any = ...,
         **kwargs: Any,
-    ) -> Series[Any]: ...
+    ) -> Series: ...
 
     # apply() overloads with keyword result_type, and axis does not matter
     @overload
     def apply(
         self,
-        f: Callable[..., S1 | NAType],
+        # Use S2 (TypeVar without `default=Any`) instead of S1 due to https://github.com/python/mypy/issues/19182.
+        f: Callable[..., S2 | NAType],
         axis: Axis = ...,
         raw: _bool = ...,
         args: Any = ...,
         *,
         result_type: Literal["expand", "reduce"],
         **kwargs: Any,
-    ) -> Series[S1]: ...
+    ) -> Series[S2]: ...
     @overload
     def apply(
         self,
-        f: Callable[..., ListLikeExceptSeriesAndStr | Series[Any] | Mapping[Any, Any]],
+        f: Callable[..., ListLikeExceptSeriesAndStr | Series | Mapping[Any, Any]],
         axis: Axis = ...,
         raw: _bool = ...,
         args: Any = ...,
@@ -1446,12 +1447,12 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
         *,
         result_type: Literal["reduce"],
         **kwargs: Any,
-    ) -> Series[Any]: ...
+    ) -> Series: ...
     @overload
     def apply(
         self,
         f: Callable[
-            ..., ListLikeExceptSeriesAndStr | Series[Any] | Scalar | Mapping[Any, Any]
+            ..., ListLikeExceptSeriesAndStr | Series | Scalar | Mapping[Any, Any]
         ],
         axis: Axis = ...,
         raw: _bool = ...,
@@ -1465,27 +1466,28 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     @overload
     def apply(
         self,
-        f: Callable[..., Series[Any]],
+        f: Callable[..., Series],
         axis: AxisIndex = ...,
         raw: _bool = ...,
         args: Any = ...,
         *,
         result_type: Literal["reduce"],
         **kwargs: Any,
-    ) -> Series[Any]: ...
+    ) -> Series: ...
 
     # apply() overloads with default result_type of None, and keyword axis=1 matters
     @overload
     def apply(
         self,
-        f: Callable[..., S1 | NAType],
+        # Use S2 (TypeVar without `default=Any`) instead of S1 due to https://github.com/python/mypy/issues/19182.
+        f: Callable[..., S2 | NAType],
         raw: _bool = ...,
         result_type: None = ...,
         args: Any = ...,
         *,
         axis: AxisColumn,
         **kwargs: Any,
-    ) -> Series[S1]: ...
+    ) -> Series[S2]: ...
     @overload
     def apply(
         self,
@@ -1496,11 +1498,11 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
         *,
         axis: AxisColumn,
         **kwargs: Any,
-    ) -> Series[Any]: ...
+    ) -> Series: ...
     @overload
     def apply(
         self,
-        f: Callable[..., Series[Any]],
+        f: Callable[..., Series],
         raw: _bool = ...,
         result_type: None = ...,
         args: Any = ...,
@@ -1513,7 +1515,7 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     @overload
     def apply(
         self,
-        f: Callable[..., Series[Any]],
+        f: Callable[..., Series],
         raw: _bool = ...,
         args: Any = ...,
         *,
@@ -1538,7 +1540,7 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     ) -> Self: ...
     def merge(
         self,
-        right: DataFrame | Series[Any],
+        right: DataFrame | Series,
         how: MergeHow = ...,
         on: IndexLabel | AnyArrayLike | None = ...,
         left_on: IndexLabel | AnyArrayLike | None = ...,
@@ -1684,6 +1686,8 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
     @property
     def iloc(self) -> _iLocIndexerFrame[Self]: ...
     @property
+    # mypy complains if we use Index[Any] instead of UnknownIndex here, even though
+    # the latter is aliased to the former ¯\_(ツ)_/¯.
     def index(self) -> UnknownIndex: ...
     @index.setter
     def index(self, idx: Index) -> None: ...
@@ -2012,7 +2016,7 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
             | Callable[[DataFrame], DataFrame]
             | Callable[[Any], _bool]
         ),
-        other: Scalar | UnknownSeries | DataFrame | Callable | NAType | None = ...,
+        other: Scalar | Series | DataFrame | Callable | NAType | None = ...,
         *,
         inplace: Literal[True],
         axis: Axis | None = ...,
@@ -2028,7 +2032,7 @@ class DataFrame(NDFrame, OpsMixin, _GetItemHack):
             | Callable[[DataFrame], DataFrame]
             | Callable[[Any], _bool]
         ),
-        other: Scalar | UnknownSeries | DataFrame | Callable | NAType | None = ...,
+        other: Scalar | Series | DataFrame | Callable | NAType | None = ...,
         *,
         inplace: Literal[False] = ...,
         axis: Axis | None = ...,
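To illustrate which `apply` overload a caller hits after this change, here is a sketch with hypothetical helper functions; the commented result types follow from the overloads above:

```python
import pandas as pd

df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})

def row_total(row: pd.Series) -> int:
    return int(row.sum())

def doubled(row: pd.Series) -> pd.Series:
    return row * 2

# Scalar-returning callable with axis=1: should resolve to Series[int]
# via the S2 overload above.
totals = df.apply(row_total, axis=1)

# Series-returning callable with axis=1: handled by the bare-Series overloads.
wide = df.apply(doubled, axis=1)
```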
diff --git a/pandas-stubs/core/generic.pyi b/pandas-stubs/core/generic.pyi
index ad1d5499e..edcc45d8b 100644
--- a/pandas-stubs/core/generic.pyi
+++ b/pandas-stubs/core/generic.pyi
@@ -24,7 +24,7 @@ from pandas import Index
 import pandas.core.indexing as indexing
 from pandas.core.resample import DatetimeIndexResampler
 from pandas.core.series import (
-    UnknownSeries,
+    Series,
 )
 import sqlalchemy.engine
 from typing_extensions import (
@@ -83,7 +83,7 @@ class NDFrame(indexing.IndexingMixin):
     def ndim(self) -> int: ...
     @property
     def size(self) -> int: ...
-    def equals(self, other: UnknownSeries) -> _bool: ...
+    def equals(self, other: Series) -> _bool: ...
     def __neg__(self) -> Self: ...
     def __pos__(self) -> Self: ...
     def __nonzero__(self) -> None: ...
@@ -307,7 +307,7 @@ class NDFrame(indexing.IndexingMixin):
         labels: None = ...,
         *,
         axis: Axis = ...,
-        index: Hashable | Sequence[Hashable] | Index[Any] = ...,
+        index: Hashable | Sequence[Hashable] | Index = ...,
         columns: Hashable | Iterable[Hashable],
         level: Level | None = ...,
         inplace: Literal[True],
@@ -319,7 +319,7 @@ class NDFrame(indexing.IndexingMixin):
         labels: None = ...,
         *,
         axis: Axis = ...,
-        index: Hashable | Sequence[Hashable] | Index[Any],
+        index: Hashable | Sequence[Hashable] | Index,
         columns: Hashable | Iterable[Hashable] = ...,
         level: Level | None = ...,
         inplace: Literal[True],
@@ -328,7 +328,7 @@ class NDFrame(indexing.IndexingMixin):
     @overload
     def drop(
         self,
-        labels: Hashable | Sequence[Hashable] | Index[Any],
+        labels: Hashable | Sequence[Hashable] | Index,
         *,
         axis: Axis = ...,
         index: None = ...,
@@ -343,7 +343,7 @@ class NDFrame(indexing.IndexingMixin):
         labels: None = ...,
         *,
         axis: Axis = ...,
-        index: Hashable | Sequence[Hashable] | Index[Any] = ...,
+        index: Hashable | Sequence[Hashable] | Index = ...,
         columns: Hashable | Iterable[Hashable],
         level: Level | None = ...,
         inplace: Literal[False] = ...,
@@ -355,7 +355,7 @@ class NDFrame(indexing.IndexingMixin):
         labels: None = ...,
         *,
         axis: Axis = ...,
-        index: Hashable | Sequence[Hashable] | Index[Any],
+        index: Hashable | Sequence[Hashable] | Index,
         columns: Hashable | Iterable[Hashable] = ...,
         level: Level | None = ...,
         inplace: Literal[False] = ...,
@@ -364,7 +364,7 @@ class NDFrame(indexing.IndexingMixin):
     @overload
     def drop(
         self,
-        labels: Hashable | Sequence[Hashable] | Index[Any],
+        labels: Hashable | Sequence[Hashable] | Index,
         *,
         axis: Axis = ...,
         index: None = ...,
diff --git a/pandas-stubs/core/groupby/generic.pyi b/pandas-stubs/core/groupby/generic.pyi
index 920c962fc..d2d025de0 100644
--- a/pandas-stubs/core/groupby/generic.pyi
+++ b/pandas-stubs/core/groupby/generic.pyi
@@ -24,10 +24,7 @@ from pandas.core.groupby.groupby import (
     GroupBy,
     GroupByPlot,
 )
-from pandas.core.series import (
-    Series,
-    UnknownSeries,
-)
+from pandas.core.series import Series
 from typing_extensions import (
     Self,
     TypeAlias,
@@ -35,8 +32,8 @@ from typing_extensions import (
 
 from pandas._libs.tslibs.timestamps import Timestamp
 from pandas._typing import (
-    S1,
     S2,
+    S3,
     AggFuncTypeBase,
     AggFuncTypeFrame,
     ByT,
@@ -59,26 +56,26 @@ class NamedAgg(NamedTuple):
     column: str
     aggfunc: AggScalar
 
-class SeriesGroupBy(GroupBy[Series[S1]], Generic[S1, ByT]):
+class SeriesGroupBy(GroupBy[Series[S2]], Generic[S2, ByT]):
     @overload
     def aggregate(
         self,
-        func: Callable[Concatenate[Series[S1], P], S2],
+        func: Callable[Concatenate[Series[S2], P], S3],
         /,
         *args,
         engine: WindowingEngine = ...,
         engine_kwargs: WindowingEngineKwargs = ...,
         **kwargs,
-    ) -> Series[S2]: ...
+    ) -> Series[S3]: ...
     @overload
     def aggregate(
         self,
-        func: Callable[[Series], S2],
+        func: Callable[[Series], S3],
         *args,
         engine: WindowingEngine = ...,
         engine_kwargs: WindowingEngineKwargs = ...,
         **kwargs,
-    ) -> Series[S2]: ...
+    ) -> Series[S3]: ...
     @overload
     def aggregate(
         self,
@@ -98,29 +95,29 @@ class SeriesGroupBy(GroupBy[Series[S1]], Generic[S1, ByT]):
         engine: WindowingEngine = ...,
         engine_kwargs: WindowingEngineKwargs = ...,
         **kwargs,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     agg = aggregate
     @overload
     def transform(
         self,
-        func: Callable[Concatenate[Series[S1], P], Series[S2]],
+        func: Callable[Concatenate[Series[S2], P], Series[S3]],
         /,
         *args: Any,
         engine: WindowingEngine = ...,
         engine_kwargs: WindowingEngineKwargs = ...,
         **kwargs: Any,
-    ) -> Series[S2]: ...
+    ) -> Series[S3]: ...
     @overload
     def transform(
         self,
         func: Callable,
         *args: Any,
         **kwargs: Any,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @overload
     def transform(
         self, func: TransformReductionListType, *args, **kwargs
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     def filter(
         self, func: Callable | str, dropna: bool = ..., *args, **kwargs
     ) -> Series: ...
@@ -155,7 +152,7 @@ class SeriesGroupBy(GroupBy[Series[S1]], Generic[S1, ByT]):
         self,
         indices: TakeIndexer,
         **kwargs,
-    ) -> Series[S1]: ...
+    ) -> Series[S2]: ...
     def skew(
         self,
         skipna: bool = ...,
@@ -166,10 +163,10 @@ class SeriesGroupBy(GroupBy[Series[S1]], Generic[S1, ByT]):
     def plot(self) -> GroupByPlot[Self]: ...
     def nlargest(
         self, n: int = ..., keep: NsmallestNlargestKeep = ...
-    ) -> Series[S1]: ...
+    ) -> Series[S2]: ...
     def nsmallest(
         self, n: int = ..., keep: NsmallestNlargestKeep = ...
-    ) -> Series[S1]: ...
+    ) -> Series[S2]: ...
     def idxmin(self, skipna: bool = ...) -> Series: ...
     def idxmax(self, skipna: bool = ...) -> Series: ...
     def corr(
@@ -207,7 +204,7 @@ class SeriesGroupBy(GroupBy[Series[S1]], Generic[S1, ByT]):
     @final  # type: ignore[misc]
     def __iter__(  # pyright: ignore[reportIncompatibleMethodOverride]
         self,
-    ) -> Iterator[tuple[ByT, Series[S1]]]: ...
+    ) -> Iterator[tuple[ByT, Series[S2]]]: ...
 
 _TT = TypeVar("_TT", bound=Literal[True, False])
 
diff --git a/pandas-stubs/core/indexes/multi.pyi b/pandas-stubs/core/indexes/multi.pyi
index f0b00ec39..d6bbd6258 100644
--- a/pandas-stubs/core/indexes/multi.pyi
+++ b/pandas-stubs/core/indexes/multi.pyi
@@ -5,7 +5,6 @@ from collections.abc import (
     Sequence,
 )
 from typing import (
-    Any,
     overload,
 )
 
@@ -27,7 +26,7 @@ from pandas._typing import (
     np_ndarray_bool,
 )
 
-class MultiIndex(Index[Any]):
+class MultiIndex(Index):
     def __new__(
         cls,
         levels: Sequence[SequenceNotStr[Hashable]] = ...,
diff --git a/pandas-stubs/core/reshape/concat.pyi b/pandas-stubs/core/reshape/concat.pyi
index 5ea794cee..9b8624f56 100644
--- a/pandas-stubs/core/reshape/concat.pyi
+++ b/pandas-stubs/core/reshape/concat.pyi
@@ -4,7 +4,6 @@ from collections.abc import (
     Sequence,
 )
 from typing import (
-    Any,
     Literal,
     overload,
 )
@@ -40,7 +39,7 @@ def concat(  # type: ignore[overload-overlap]
 ) -> DataFrame: ...
 @overload
 def concat(  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
-    objs: Iterable[Series[Any]] | Mapping[HashableT1, Series[Any]],
+    objs: Iterable[Series] | Mapping[HashableT1, Series],
     *,
     axis: AxisIndex = ...,
     join: Literal["inner", "outer"] = ...,
@@ -51,12 +50,10 @@ def concat(  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappin
     verify_integrity: bool = ...,
     sort: bool = ...,
     copy: bool = ...,
-) -> Series[Any]: ...
+) -> Series: ...
 @overload
 def concat(  # type: ignore[overload-overlap] # pyright: ignore[reportOverlappingOverload]
-    objs: (
-        Iterable[Series[Any] | DataFrame] | Mapping[HashableT1, Series[Any] | DataFrame]
-    ),
+    objs: Iterable[Series | DataFrame] | Mapping[HashableT1, Series | DataFrame],
     *,
     axis: Axis = ...,
     join: Literal["inner", "outer"] = ...,
@@ -98,7 +95,7 @@ def concat(  # type: ignore[overload-overlap]
 ) -> DataFrame: ...
 @overload
 def concat(  # type: ignore[overload-overlap]
-    objs: Iterable[Series[Any] | None] | Mapping[HashableT1, Series[Any] | None],
+    objs: Iterable[Series | None] | Mapping[HashableT1, Series | None],
     *,
     axis: AxisIndex = ...,
     join: Literal["inner", "outer"] = ...,
@@ -109,12 +106,12 @@ def concat(  # type: ignore[overload-overlap]
     verify_integrity: bool = ...,
     sort: bool = ...,
     copy: bool = ...,
-) -> Series[Any]: ...
+) -> Series: ...
 @overload
 def concat(
     objs: (
-        Iterable[Series[Any] | DataFrame | None]
-        | Mapping[HashableT1, Series[Any] | DataFrame | None]
+        Iterable[Series | DataFrame | None]
+        | Mapping[HashableT1, Series | DataFrame | None]
     ),
     *,
     axis: Axis = ...,
@@ -129,7 +126,7 @@ def concat(
 ) -> DataFrame: ...
 
 # Including either of the next 2 overloads causes mypy to complain about
-# test_pandas.py:test_types_concat() in assert_type(pd.concat([s, s2]), "pd.Series")
+# test_pandas.py:test_types_concat() in assert_type(pd.concat([s, s2]), pd.Series)
 # It thinks that pd.concat([s, s2]) is Any .  May be due to Series being
 # Generic, or the axis argument being unspecified, and then there is partial
 # overlap with the first 2 overloads.
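The test referenced in the comment boils down to something like the following (the concrete Series contents are assumptions); with the bare `Series` spelling, the Series-only overload still resolves to `Series`, i.e. `Series[Any]`:

```python
import pandas as pd
from typing_extensions import assert_type

s = pd.Series([1, 2, 3])
s2 = pd.Series([4, 5, 6])

# Matches the Iterable[Series] overload above, so the result is a bare Series.
assert_type(pd.concat([s, s2]), pd.Series)
```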
diff --git a/pandas-stubs/core/series.pyi b/pandas-stubs/core/series.pyi
index c87d84de6..22ef5c762 100644
--- a/pandas-stubs/core/series.pyi
+++ b/pandas-stubs/core/series.pyi
@@ -63,7 +63,6 @@ from pandas.core.indexes.accessors import (
     TimedeltaProperties,
     TimestampProperties,
 )
-from pandas.core.indexes.base import UnknownIndex
 from pandas.core.indexes.category import CategoricalIndex
 from pandas.core.indexes.datetimes import DatetimeIndex
 from pandas.core.indexes.interval import IntervalIndex
@@ -266,7 +265,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         dtype: Dtype = ...,
         name: Hashable = ...,
         copy: bool = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @overload
     def __new__(
         cls,
@@ -420,7 +419,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         dtype: Dtype = ...,
         name: Hashable = ...,
         copy: bool = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @property
     def hasnans(self) -> bool: ...
     def div(
@@ -733,7 +732,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def groupby(
         self,
-        by: CategoricalIndex | UnknownIndex | UnknownSeries,
+        by: CategoricalIndex | Index | Series,
         axis: AxisIndex = ...,
         level: IndexLabel | None = ...,
         as_index: _bool = ...,
@@ -816,15 +815,15 @@ class Series(IndexOpsMixin[S1], NDFrame):
         self, other: ArrayLike | dict[_str, np.ndarray] | Sequence[S1] | Index[S1]
     ) -> np.ndarray: ...
     @overload
-    def __matmul__(self, other: UnknownSeries) -> Scalar: ...
+    def __matmul__(self, other: Series) -> Scalar: ...
     @overload
-    def __matmul__(self, other: DataFrame) -> UnknownSeries: ...
+    def __matmul__(self, other: DataFrame) -> Series: ...
     @overload
     def __matmul__(self, other: np.ndarray) -> np.ndarray: ...
     @overload
-    def __rmatmul__(self, other: UnknownSeries) -> Scalar: ...
+    def __rmatmul__(self, other: Series) -> Scalar: ...
     @overload
-    def __rmatmul__(self, other: DataFrame) -> UnknownSeries: ...
+    def __rmatmul__(self, other: DataFrame) -> Series: ...
     @overload
     def __rmatmul__(self, other: np.ndarray) -> np.ndarray: ...
     @overload
@@ -844,16 +843,16 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def compare(
         self,
-        other: UnknownSeries,
+        other: Series,
         align_axis: AxisIndex,
         keep_shape: bool = ...,
         keep_equal: bool = ...,
         result_names: Suffixes = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @overload
     def compare(
         self,
-        other: UnknownSeries,
+        other: Series,
         align_axis: AxisColumn = ...,
         keep_shape: bool = ...,
         keep_equal: bool = ...,
@@ -953,9 +952,9 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def map(
         self,
-        arg: Callable[[Any], Any] | Mapping[Any, Any] | UnknownSeries,
+        arg: Callable[[Any], Any] | Mapping[Any, Any] | Series,
         na_action: Literal["ignore"] | None = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @overload
     def aggregate(
         self: Series[int],
@@ -979,7 +978,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         axis: AxisIndex = ...,
         *args: Any,
         **kwargs: Any,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     agg = aggregate
     @overload
     def transform(
@@ -1006,7 +1005,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         convertDType: _bool = ...,
         args: tuple = ...,
         **kwargs: Any,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @overload
     def apply(
         self,
@@ -1018,20 +1017,20 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def apply(
         self,
-        func: Callable[..., UnknownSeries],
+        func: Callable[..., Series],
         convertDType: _bool = ...,
         args: tuple = ...,
         **kwargs: Any,
     ) -> DataFrame: ...
     def align(
         self,
-        other: DataFrame | UnknownSeries,
+        other: DataFrame | Series,
         join: JoinHow = ...,
         axis: Axis | None = ...,
         level: Level | None = ...,
         copy: _bool = ...,
         fill_value: Scalar | NAType | None = ...,
-    ) -> tuple[UnknownSeries, UnknownSeries]: ...
+    ) -> tuple[Series, Series]: ...
     @overload
     def rename(
         self,
@@ -1126,7 +1125,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         freq: DateOffset | timedelta | _str | None = ...,
         axis: Axis = ...,
         fill_value: Scalar | NAType | None = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     def info(
         self,
         verbose: bool | None = ...,
@@ -1310,7 +1309,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         dtype: ObjectDtypeArg | VoidDtypeArg | ExtensionDtype | DtypeObj,
         copy: _bool = ...,
         errors: IgnoreRaise = ...,
-    ) -> Series[Any]: ...
+    ) -> Series: ...
     def copy(self, deep: _bool = ...) -> Series[S1]: ...
     def infer_objects(self) -> Series[S1]: ...
     @overload
@@ -1509,15 +1508,11 @@ class Series(IndexOpsMixin[S1], NDFrame):
             tuple[
                 Sequence[bool]
                 | Series[bool]
-                | Callable[
-                    [UnknownSeries], UnknownSeries | np.ndarray | Sequence[bool]
-                ],
-                ListLikeU
-                | Scalar
-                | Callable[[UnknownSeries], UnknownSeries | np.ndarray],
+                | Callable[[Series], Series | np.ndarray | Sequence[bool]],
+                ListLikeU | Scalar | Callable[[Series], Series | np.ndarray],
             ],
         ],
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     def truncate(
         self,
         before: date | _str | int | None = ...,
@@ -1588,16 +1583,8 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def __add__(
         self,
-        other: (
-            num
-            | _str
-            | timedelta
-            | Timedelta
-            | _ListLike
-            | UnknownSeries
-            | np.timedelta64
-        ),
-    ) -> UnknownSeries: ...
+        other: num | _str | timedelta | Timedelta | _ListLike | Series | np.timedelta64,
+    ) -> Series: ...
     # ignore needed for mypy as we want different results based on the arguments
     @overload  # type: ignore[override]
     def __and__(  # pyright: ignore[reportOverlappingOverload]
@@ -1626,7 +1613,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         self, other: timedelta | Timedelta | TimedeltaSeries | np.timedelta64
     ) -> TimedeltaSeries: ...
     @overload
-    def __mul__(self, other: num | _ListLike | UnknownSeries) -> UnknownSeries: ...
+    def __mul__(self, other: num | _ListLike | Series) -> Series: ...
     def __mod__(self, other: num | _ListLike | Series[S1]) -> Series[S1]: ...
     def __ne__(self, other: object) -> Series[_bool]: ...  # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]
     def __pow__(self, other: num | _ListLike | Series[S1]) -> Series[S1]: ...
@@ -1640,9 +1627,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def __radd__(self, other: S1 | Series[S1]) -> Self: ...
     @overload
-    def __radd__(
-        self, other: num | _str | _ListLike | UnknownSeries
-    ) -> UnknownSeries: ...
+    def __radd__(self, other: num | _str | _ListLike | Series) -> Series: ...
     # ignore needed for mypy as we want different results based on the arguments
     @overload  # type: ignore[override]
     def __rand__(  # pyright: ignore[reportOverlappingOverload]
@@ -1659,7 +1644,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         self, other: timedelta | Timedelta | TimedeltaSeries | np.timedelta64
     ) -> TimedeltaSeries: ...
     @overload
-    def __rmul__(self, other: num | _ListLike | UnknownSeries) -> UnknownSeries: ...
+    def __rmul__(self, other: num | _ListLike | Series) -> Series: ...
     def __rnatmul__(self, other: num | _ListLike | Series[S1]) -> Series[S1]: ...
     def __rpow__(self, other: num | _ListLike | Series[S1]) -> Series[S1]: ...
     # ignore needed for mypy as we want different results based on the arguments
@@ -1669,10 +1654,8 @@ class Series(IndexOpsMixin[S1], NDFrame):
     ) -> Series[bool]: ...
     @overload
     def __ror__(self, other: int | np_ndarray_anyint | Series[int]) -> Series[int]: ...
-    def __rsub__(self, other: num | _ListLike | Series[S1]) -> UnknownSeries: ...
-    def __rtruediv__(
-        self, other: num | _ListLike | Series[S1] | Path
-    ) -> UnknownSeries: ...
+    def __rsub__(self, other: num | _ListLike | Series[S1]) -> Series: ...
+    def __rtruediv__(self, other: num | _ListLike | Series[S1] | Path) -> Series: ...
     # ignore needed for mypy as we want different results based on the arguments
     @overload  # type: ignore[override]
     def __rxor__(  # pyright: ignore[reportOverlappingOverload]
@@ -1695,10 +1678,8 @@ class Series(IndexOpsMixin[S1], NDFrame):
         self, other: Timestamp | datetime | TimestampSeries
     ) -> TimedeltaSeries: ...
     @overload
-    def __sub__(self, other: num | _ListLike | UnknownSeries) -> UnknownSeries: ...
-    def __truediv__(
-        self, other: num | _ListLike | Series[S1] | Path
-    ) -> UnknownSeries: ...
+    def __sub__(self, other: num | _ListLike | Series) -> Series: ...
+    def __truediv__(self, other: num | _ListLike | Series[S1] | Path) -> Series: ...
     # ignore needed for mypy as we want different results based on the arguments
     @overload  # type: ignore[override]
     def __xor__(  # pyright: ignore[reportOverlappingOverload]
@@ -1801,12 +1782,12 @@ class Series(IndexOpsMixin[S1], NDFrame):
         min_periods: int = ...,
         adjust: _bool = ...,
         ignore_na: _bool = ...,
-    ) -> ExponentialMovingWindow[UnknownSeries]: ...
+    ) -> ExponentialMovingWindow[Series]: ...
     def expanding(
         self,
         min_periods: int = ...,
         method: CalculationMethod = ...,
-    ) -> Expanding[UnknownSeries]: ...
+    ) -> Expanding[Series]: ...
     def floordiv(
         self,
         other: num | _ListLike | Series[S1],
@@ -1909,11 +1890,11 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def mul(
         self,
-        other: num | _ListLike | UnknownSeries,
+        other: num | _ListLike | Series,
         level: Level | None = ...,
         fill_value: float | None = ...,
         axis: AxisIndex | None = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     def multiply(
         self,
         other: num | _ListLike | Series[S1],
@@ -1993,11 +1974,11 @@ class Series(IndexOpsMixin[S1], NDFrame):
     @overload
     def rmul(
         self,
-        other: num | _ListLike | UnknownSeries,
+        other: num | _ListLike | Series,
         level: Level | None = ...,
         fill_value: float | None = ...,
         axis: AxisIndex = ...,
-    ) -> UnknownSeries: ...
+    ) -> Series: ...
     @overload
     def rolling(
         self,
@@ -2010,7 +1991,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         method: CalculationMethod = ...,
         *,
         win_type: _str,
-    ) -> Window[UnknownSeries]: ...
+    ) -> Window[Series]: ...
     @overload
     def rolling(
         self,
@@ -2023,7 +2004,7 @@ class Series(IndexOpsMixin[S1], NDFrame):
         method: CalculationMethod = ...,
         *,
         win_type: None = ...,
-    ) -> Rolling[UnknownSeries]: ...
+    ) -> Rolling[Series]: ...
     def rpow(
         self,
         other: Series[S1] | Scalar,
@@ -2361,5 +2342,3 @@ class IntervalSeries(Series[Interval[_OrderableT]], Generic[_OrderableT]):
     @property
     def array(self) -> IntervalArray: ...
     def diff(self, periods: int = ...) -> Never: ...
-
-UnknownSeries: TypeAlias = Series[Any]
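With `S1` now defaulting to `Any`, the `UnknownSeries` alias removed here is no longer needed: a bare `Series` annotation already means `Series[Any]`. A small sketch of user-facing code (the helper below is hypothetical):

```python
import pandas as pd
from typing_extensions import assert_type

def pick_column(df: pd.DataFrame, name: str) -> pd.Series:
    # A bare Series return annotation is now equivalent to Series[Any],
    # so no UnknownSeries or explicit Series[Any] spelling is required.
    return df[name]

df = pd.DataFrame({"a": [1, 2, 3]})
assert_type(pick_column(df, "a"), pd.Series)
```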
diff --git a/pandas-stubs/io/formats/style_render.pyi b/pandas-stubs/io/formats/style_render.pyi
index 15f4bcd30..5550d8d71 100644
--- a/pandas-stubs/io/formats/style_render.pyi
+++ b/pandas-stubs/io/formats/style_render.pyi
@@ -86,6 +86,6 @@ class StylerRenderer:
         level: Level | list[Level] | None = ...,
     ) -> Self: ...
     @property
-    def columns(self) -> Index[Any]: ...
+    def columns(self) -> Index: ...
     @property
-    def index(self) -> Index[Any]: ...
+    def index(self) -> Index: ...
diff --git a/tests/test_frame.py b/tests/test_frame.py
index a2c21e4b8..a951c8aa0 100644
--- a/tests/test_frame.py
+++ b/tests/test_frame.py
@@ -336,7 +336,7 @@ def test_assign() -> None:
     def my_named_func_1(df: pd.DataFrame) -> pd.Series[str]:
         return df["a"]
 
-    def my_named_func_2(df: pd.DataFrame) -> pd.Series[Any]:
+    def my_named_func_2(df: pd.DataFrame) -> pd.Series:
         return df["a"]
 
     check(assert_type(df.assign(c=lambda df: df["a"] * 2), pd.DataFrame), pd.DataFrame)
@@ -702,8 +702,8 @@ def test_frame_iterator() -> None:
     """Test iterator methods for a dataframe GH1217."""
     df = pd.DataFrame(data={"col1": [2, 1], "col2": [3, 4]})
 
-    check(assert_type(next(df.items()), tuple[Hashable, "pd.Series"]), tuple)
-    check(assert_type(next(df.iterrows()), tuple[Hashable, "pd.Series"]), tuple)
+    check(assert_type(next(df.items()), tuple[Hashable, pd.Series]), tuple)
+    check(assert_type(next(df.iterrows()), tuple[Hashable, pd.Series]), tuple)
     check(assert_type(next(df.itertuples()), _PandasNamedTuple), _PandasNamedTuple)
 
 
@@ -3121,15 +3121,13 @@ def test_frame_stack() -> None:
         upper="2.3.99",
     ):
         check(
-            assert_type(
-                df_multi_level_cols2.stack(0), Union[pd.DataFrame, "pd.Series[Any]"]
-            ),
+            assert_type(df_multi_level_cols2.stack(0), Union[pd.DataFrame, pd.Series]),
             pd.DataFrame,
         )
         check(
             assert_type(
                 df_multi_level_cols2.stack([0, 1]),
-                Union[pd.DataFrame, "pd.Series[Any]"],
+                Union[pd.DataFrame, pd.Series],
             ),
             pd.Series,
         )
@@ -3137,14 +3135,14 @@ def test_frame_stack() -> None:
             check(
                 assert_type(
                     df_multi_level_cols2.stack(0, future_stack=False),
-                    Union[pd.DataFrame, "pd.Series[Any]"],
+                    Union[pd.DataFrame, pd.Series],
                 ),
                 pd.DataFrame,
             )
             check(
                 assert_type(
                     df_multi_level_cols2.stack(0, dropna=True, sort=True),
-                    Union[pd.DataFrame, "pd.Series[Any]"],
+                    Union[pd.DataFrame, pd.Series],
                 ),
                 pd.DataFrame,
             )
diff --git a/tests/test_series.py b/tests/test_series.py
index c47c02f6d..a2ace21e5 100644
--- a/tests/test_series.py
+++ b/tests/test_series.py
@@ -2958,13 +2958,13 @@ def test_astype_object(cast_arg: ObjectDtypeArg, target_type: type) -> None:
 
     if TYPE_CHECKING:
         # python object
-        assert_type(s.astype(object), "pd.Series[Any]")
-        assert_type(s.astype("object"), "pd.Series[Any]")
+        assert_type(s.astype(object), pd.Series)
+        assert_type(s.astype("object"), pd.Series)
         # numpy object
-        assert_type(s.astype(np.object_), "pd.Series[Any]")
-        # assert_type(s.astype("object_"), "pd.Series[Any]")  # NOTE: not assigned
-        # assert_type(s.astype("object0"), "pd.Series[Any]")  # NOTE: not assigned
-        assert_type(s.astype("O"), "pd.Series[Any]")
+        assert_type(s.astype(np.object_), pd.Series)
+        # assert_type(s.astype("object_"), pd.Series)  # NOTE: not assigned
+        # assert_type(s.astype("object0"), pd.Series)  # NOTE: not assigned
+        assert_type(s.astype("O"), pd.Series)
 
 
 @pytest.mark.parametrize("cast_arg, target_type", ASTYPE_VOID_ARGS, ids=repr)
@@ -2974,9 +2974,9 @@ def test_astype_void(cast_arg: VoidDtypeArg, target_type: type) -> None:
 
     if TYPE_CHECKING:
         # numpy void
-        assert_type(s.astype(np.void), "pd.Series[Any]")
-        assert_type(s.astype("void"), "pd.Series[Any]")
-        assert_type(s.astype("V"), "pd.Series[Any]")
+        assert_type(s.astype(np.void), pd.Series)
+        assert_type(s.astype("void"), pd.Series)
+        assert_type(s.astype("V"), pd.Series)
 
 
 def test_astype_other() -> None:
@@ -2988,7 +2988,7 @@ def test_astype_other() -> None:
 
     # Test self-consistent with s.dtype (#747)
     # NOTE: https://github.com/python/typing/issues/801#issuecomment-1646171898
-    check(assert_type(s.astype(s.dtype), "pd.Series[Any]"), pd.Series, np.integer)
+    check(assert_type(s.astype(s.dtype), pd.Series), pd.Series, np.integer)
 
     # test DecimalDtype
     orseries = pd.Series([Decimal(x) for x in [1, 2, 3]])
@@ -3003,7 +3003,7 @@ def test_astype_other() -> None:
     # Test non-literal string
     # NOTE: currently unsupported! Enable in future.
     # string: str = "int"  # not Literal!
-    # check(assert_type(s.astype(string), "pd.Series[Any]"), pd.Series, np.integer)
+    # check(assert_type(s.astype(string), pd.Series), pd.Series, np.integer)
 
 
 def test_all_astype_args_tested() -> None:
@@ -3310,7 +3310,7 @@ def test_get() -> None:
 
 def test_series_new_empty() -> None:
     # GH 826
-    check(assert_type(pd.Series(), "pd.Series[Any]"), pd.Series)
+    check(assert_type(pd.Series(), pd.Series), pd.Series)
 
 
 def test_series_mapping() -> None:
@@ -3492,10 +3492,10 @@ def first_arg_not_series(argument_1: int, ser: pd.Series) -> pd.Series:
 
 def test_series_apply() -> None:
     s = pd.Series(["A", "B", "AB"])
-    check(assert_type(s.apply(tuple), "pd.Series[Any]"), pd.Series)
-    check(assert_type(s.apply(list), "pd.Series[Any]"), pd.Series)
-    check(assert_type(s.apply(set), "pd.Series[Any]"), pd.Series)
-    check(assert_type(s.apply(frozenset), "pd.Series[Any]"), pd.Series)
+    check(assert_type(s.apply(tuple), pd.Series), pd.Series)
+    check(assert_type(s.apply(list), pd.Series), pd.Series)
+    check(assert_type(s.apply(set), pd.Series), pd.Series)
+    check(assert_type(s.apply(frozenset), pd.Series), pd.Series)
 
 
 def test_diff() -> None:
@@ -3657,7 +3657,7 @@ def callable(x: int) -> str:
 
     unknown_series = pd.Series([1, 0, None])
     check(
-        assert_type(unknown_series.map({1: True, 0: False, None: None}), "pd.Series"),
+        assert_type(unknown_series.map({1: True, 0: False, None: None}), pd.Series),
         pd.Series,
     )
 
@@ -3757,10 +3757,10 @@ class MyDict(TypedDict):
 
 
 def test_series_empty_dtype() -> None:
-    """Test for the creation of a Series from an empty list GH571 to map to a Series[Any]."""
+    """Test for the creation of a Series from an empty list GH571 to map to a Series."""
     new_tab: Sequence[Never] = []  # need to be typehinted to please mypy
-    check(assert_type(pd.Series(new_tab), "pd.Series[Any]"), pd.Series)
-    check(assert_type(pd.Series([]), "pd.Series[Any]"), pd.Series)
+    check(assert_type(pd.Series(new_tab), pd.Series), pd.Series)
+    check(assert_type(pd.Series([]), pd.Series), pd.Series)
     # ensure that an empty string does not get matched to Sequence[Never]
     check(assert_type(pd.Series(""), "pd.Series[str]"), pd.Series)