Example code (type-checks cleanly with both mypy and pyright, targeting Python 3.12):
from dataclasses import dataclass
from typing import TypeVar, Generic
T_co = TypeVar("T_co", covariant=True)
@dataclass
class OldDataclass(Generic[T_co]):
x: T_co # error expected, not reported
y: list[T_co] # error expected, not reported
class OldBox(Generic[T_co]):
def __init__(self, value: T_co) -> None:
self.value: T_co = value # error expected, not reported
class OldShelf(Generic[T_co]):
def __init__(self, values: list[T_co]) -> None:
self._values: list[T_co] = values
def get_values(self) -> list[T_co]: # error expected, not reported
return self._values
def set_values(self, new_values: list[T_co]) -> None: # error expected, not reported
self._values = new_values
@dataclass
class NewDataclass[T]: # T inferred as invariant
x: T
y: list[T]
class NewBox[T]: # T inferred as invariant
def __init__(self, value: T) -> None:
self.value: T = value
class NewShelf[T]: # T inferred as invariant
def __init__(self, values: list[T]) -> None:
self._values: list[T] = values
def get_values(self) -> list[T]:
return self._values
def set_values(self, new_values: list[T]) -> None:
self._values = new_values
It seems that type checkers don’t rigorously verify that generic classes use old-style type variables only in positions consistent with their declared variance. Is that intentional, given that type checkers can now infer the correct variance for new-style (PEP 695) type parameters?
Some checking does appear to be performed, but it catches only the simplest errors:
class OldBox(Generic[T_co]):
def set_value(self, value: T_co) -> None:
# ^^^^
# error: Cannot use a covariant type variable as a parameter
...