Skip to content

Commit

Permalink
Migrate from Tuple -> tuple in torch/_dynamo (#144261)
Browse files Browse the repository at this point in the history
Summary:
X-link: pytorch/pytorch#144261
Approved by: https://github.com/aorenste, https://github.com/zou3519

Reviewed By: clee2000

Differential Revision: D68023204

Pulled By: bobrenjc93

fbshipit-source-id: 2f8a435a35083bc00bc6edb2208223364a3c47a6
  • Loading branch information
bobrenjc93 authored and facebook-github-bot committed Jan 11, 2025
1 parent e7f85d2 commit 9b7cac9
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 13 deletions.
3 changes: 1 addition & 2 deletions userbenchmark/dynamo/dynamobench/_dynamo/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
Optional,
overload,
Sequence,
Tuple,
TypeVar,
Union,
)
Expand Down Expand Up @@ -141,7 +140,7 @@ def reduce_to_scalar_loss(out: torch.Tensor) -> torch.Tensor:

@overload
def reduce_to_scalar_loss(
out: Union[List[Any], Tuple[Any, ...], Dict[Any, Any]]
out: Union[List[Any], tuple[Any, ...], Dict[Any, Any]]
) -> float:
...

Expand Down
21 changes: 10 additions & 11 deletions userbenchmark/dynamo/dynamobench/_dynamo/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@
Optional,
overload,
Set,
Tuple,
Type,
TypeVar,
Union,
Expand Down Expand Up @@ -106,7 +105,7 @@

# NOTE: Make sure `NP_SUPPORTED_MODULES` and `NP_TO_TNP_MODULE` are in sync.
if np:
NP_SUPPORTED_MODULES: Tuple[types.ModuleType, ...] = (
NP_SUPPORTED_MODULES: tuple[types.ModuleType, ...] = (
np,
np.fft,
np.linalg,
Expand Down Expand Up @@ -202,8 +201,8 @@ def log(cls):


def tabulate(
rows: Union[List[Tuple[str, object]], List[List[object]]],
headers: Union[Tuple[str, ...], List[str]],
rows: Union[List[tuple[str, object]], List[List[object]]],
headers: Union[tuple[str, ...], List[str]],
) -> str:
try:
import tabulate
Expand Down Expand Up @@ -590,7 +589,7 @@ def compile_times(repr: Literal["str"], aggregate: bool = False) -> str:
@overload
def compile_times(
repr: Literal["csv"], aggregate: bool = False
) -> Tuple[List[str], List[object]]:
) -> tuple[List[str], List[object]]:
...


Expand Down Expand Up @@ -658,7 +657,7 @@ def __init__(self, maxsize: int = 4096) -> None:
def reset(self):
self.set = OrderedDict()

def add(self, key: Union[str, Tuple[object, object]]) -> bool:
def add(self, key: Union[str, tuple[object, object]]) -> bool:
if key in self.set:
self.set.move_to_end(key, last=True)
if not config.verbose:
Expand Down Expand Up @@ -797,7 +796,7 @@ def istype(obj: object, allowed_types: Type[T]) -> TypeIs[T]:

@overload
def istype(
obj: object, allowed_types: Tuple[Type[List[T]], Type[Tuple[T, ...]]]
obj: object, allowed_types: tuple[Type[List[T]], Type[tuple[T, ...]]]
) -> TypeIs[T]:
...

Expand Down Expand Up @@ -940,7 +939,7 @@ def is_numpy_ndarray(value):

def istensor(obj):
    """Check if obj is a tensor"""
tensor_list: Tuple[type, ...] = (
tensor_list: tuple[type, ...] = (
torch.Tensor,
torch.nn.Parameter,
*config.traceable_tensor_subclasses,
Expand Down Expand Up @@ -1900,7 +1899,7 @@ def is_namedtuple_cls(cls):


@functools.lru_cache(1)
def namedtuple_fields(cls) -> Tuple[str, ...]:
def namedtuple_fields(cls) -> tuple[str, ...]:
"""Get the fields of a namedtuple or a torch.return_types.* quasi-namedtuple"""
if cls is slice:
return ("start", "stop", "step")
Expand Down Expand Up @@ -2188,7 +2187,7 @@ def tuple_iterator_getitem(it, index):
iter_next = next


def normalize_range_iter(range_iter) -> Tuple[int, int, int]:
def normalize_range_iter(range_iter) -> tuple[int, int, int]:
_, (range_obj,), maybe_idx = range_iter.__reduce__()
# In 3.12+, `maybe_idx` could be None, and `range_obj.start` would've been
# already incremented by the current index.
Expand Down Expand Up @@ -3070,7 +3069,7 @@ def tensor_always_has_static_shape(
tensor: Union[torch.Tensor, Any],
is_tensor: bool,
tensor_source: Source,
) -> Tuple[bool, Optional[TensorStaticReason]]:
) -> tuple[bool, Optional[TensorStaticReason]]:
"""
Given a tensor, source, and is_tensor flag, determine if a shape should be static.
Expand Down

0 comments on commit 9b7cac9

Please sign in to comment.