Commit

ruff: fix type import errors
alexfikl authored and inducer committed Nov 4, 2024
1 parent dbc69c6 commit 64eae19
Showing 15 changed files with 142 additions and 159 deletions.
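Every file in this commit gets the same ruff-driven modernization (most likely pyupgrade rules along the lines of UP006/UP007/UP035): `Optional[X]` and `Union[X, Y]` become PEP 604 unions (`X | None`, `X | Y`), `List`/`Tuple`/`Dict`/`Type` become the PEP 585 builtins `list`/`tuple`/`dict`/`type`, and collection ABCs such as `Sequence`, `Iterable`, `Callable`, `Hashable`, and `Mapping` are imported from `collections.abc` rather than `typing`. The sketch below is a hypothetical before/after illustration of that pattern, not code from this repository, and it assumes Python 3.10+ so that the `|` annotations evaluate at runtime.

```python
# Hypothetical illustration of the rewrite this commit applies; not meshmode code.
#
# Before (deprecated typing aliases that ruff flags):
#
#   from typing import Callable, Dict, List, Optional, Tuple
#
#   def lookup(table: Dict[str, List[int]],
#              key: Optional[str] = None) -> Optional[Tuple[int, ...]]:
#       ...

# After: collection ABCs from collections.abc, builtin generics, `X | None`.
from collections.abc import Callable


def lookup(table: dict[str, list[int]],
           key: str | None = None) -> tuple[int, ...] | None:
    # None means "no key requested"; a missing key also yields None.
    if key is None:
        return None
    values = table.get(key)
    return tuple(values) if values is not None else None


def apply_twice(fn: Callable[[int], int], x: int) -> int:
    # Callable now comes from collections.abc rather than typing.
    return fn(fn(x))
```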
5 changes: 2 additions & 3 deletions examples/moving-geometry.py
@@ -21,7 +21,6 @@
 """
 
 import logging
-from typing import Optional, Type
 
 import numpy as np
 
@@ -104,7 +103,7 @@ def advance(actx, dt, t, x, fn):
 
 def run(actx, *,
         ambient_dim: int = 3,
-        resolution: Optional[int] = None,
+        resolution: int | None = None,
         target_order: int = 4,
         tmax: float = 1.0,
         timestep: float = 1.0e-2,
@@ -128,7 +127,7 @@ def run(actx, *,
     # a bit of work when reconstructing after a time step
 
     if group_factory_name == "warp_and_blend":
-        group_factory_cls: Type[poly.HomogeneousOrderBasedGroupFactory] = (
+        group_factory_cls: type[poly.HomogeneousOrderBasedGroupFactory] = (
             poly.PolynomialWarpAndBlend2DRestrictingGroupFactory)
 
         unit_nodes = mp.warp_and_blend_nodes(ambient_dim - 1, mesh_order)
17 changes: 9 additions & 8 deletions meshmode/discretization/__init__.py
@@ -24,7 +24,8 @@
 """
 
 from abc import ABC, abstractmethod
-from typing import Hashable, Iterable, Optional, Protocol, runtime_checkable
+from collections.abc import Hashable, Iterable
+from typing import Protocol, runtime_checkable
 from warnings import warn
 
 import numpy as np
@@ -353,7 +354,7 @@ def __init__(self,
             actx: ArrayContext,
             mesh: _Mesh,
             group_factory: ElementGroupFactory,
-            real_dtype: Optional[np.dtype] = None,
+            real_dtype: np.dtype | None = None,
             _force_actx_clone: bool = True) -> None:
         """
         :arg actx: an :class:`arraycontext.ArrayContext` used to perform
@@ -397,10 +398,10 @@ def __init__(self,
         self._cached_nodes = None
 
     def copy(self,
-            actx: Optional[ArrayContext] = None,
-            mesh: Optional[_Mesh] = None,
-            group_factory: Optional[ElementGroupFactory] = None,
-            real_dtype: Optional[np.dtype] = None) -> "Discretization":
+            actx: ArrayContext | None = None,
+            mesh: _Mesh | None = None,
+            group_factory: ElementGroupFactory | None = None,
+            real_dtype: np.dtype | None = None) -> "Discretization":
         """Creates a new object of the same type with all arguments that are not
         *None* replaced. The copy is not recursive.
         """
@@ -461,7 +462,7 @@ def _new_array(self, actx, creation_func, dtype=None):
                 for grp in self.groups)))
 
     def empty(self, actx: ArrayContext,
-            dtype: Optional[np.dtype] = None) -> _DOFArray:
+            dtype: np.dtype | None = None) -> _DOFArray:
         """Return an empty :class:`~meshmode.dof_array.DOFArray`.
 
         :arg dtype: type special value 'c' will result in a
@@ -479,7 +480,7 @@ def empty(self, actx: ArrayContext,
         return self._new_array(actx, actx.np.zeros, dtype=dtype)
 
     def zeros(self, actx: ArrayContext,
-            dtype: Optional[np.dtype] = None) -> _DOFArray:
+            dtype: np.dtype | None = None) -> _DOFArray:
         """Return a zero-initialized :class:`~meshmode.dof_array.DOFArray`.
 
         :arg dtype: type special value 'c' will result in a
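A minimal sketch of the `copy()` idiom touched above, with hypothetical names rather than the meshmode `Discretization` API: every argument is annotated `X | None = None`, and passing `None` keeps the existing value.

```python
# Sketch of the `X | None = None` copy() idiom; hypothetical class,
# not the meshmode Discretization API.
from dataclasses import dataclass

import numpy as np


@dataclass(frozen=True)
class Grid:
    order: int
    real_dtype: np.dtype

    def copy(self,
             order: int | None = None,
             real_dtype: np.dtype | None = None) -> "Grid":
        # None keeps the current value; anything else replaces it.
        return Grid(
            order=self.order if order is None else order,
            real_dtype=self.real_dtype if real_dtype is None else real_dtype)


# Example: keeps the dtype, changes only the order.
g = Grid(4, np.dtype(np.float64)).copy(order=7)
```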
25 changes: 13 additions & 12 deletions meshmode/discretization/connection/direct.py
@@ -21,8 +21,9 @@
 """
 
 from abc import ABC, abstractmethod
+from collections.abc import Sequence
 from dataclasses import dataclass
-from typing import Generic, List, Optional, Sequence, Tuple
+from typing import Generic
 
 import numpy as np
 
@@ -51,7 +52,7 @@
 
 
 def _reshape_and_preserve_tags(
-        actx: ArrayContext, ary: ArrayT, new_shape: Tuple[int, ...]) -> ArrayT:
+        actx: ArrayContext, ary: ArrayT, new_shape: tuple[int, ...]) -> ArrayT:
     try:
         tags = ary.tags
     except AttributeError:
@@ -126,19 +127,19 @@ class InterpolationBatch(Generic[ArrayT]):
     from_element_indices: ArrayT
     to_element_indices: ArrayT
     result_unit_nodes: np.ndarray
-    to_element_face: Optional[int]
+    to_element_face: int | None
 
     def __post_init__(self):
-        self._global_from_element_indices_cache: \
-            Optional[Tuple[ArrayT, ArrayT]] = None
+        self._global_from_element_indices_cache: (
+            tuple[ArrayT, ArrayT] | None) = None
 
     @property
     def nelements(self) -> int:
         return len(self.from_element_indices)
 
     def _global_from_element_indices(
             self, actx: ArrayContext, to_group: ElementGroupBase
-            ) -> Tuple[ArrayT, ArrayT]:
+            ) -> tuple[ArrayT, ArrayT]:
         """Returns a version of :attr:`from_element_indices` that is usable
         without :attr:`to_element_indices`, consisting of a tuple.
         The first entry of the tuple is an array of flags indicating
@@ -408,7 +409,7 @@ def _resample_matrix(self, actx: ArrayContext, to_group_index: int,
     # {{{ _resample_point_pick_indices
 
     def _resample_point_pick_indices(self, to_group_index: int, ibatch_index: int,
-            tol_multiplier: Optional[float] = None):
+            tol_multiplier: float | None = None):
         """If :meth:`_resample_matrix` *R* is a row subset of a permutation
         matrix *P*, return the index subset I so that ``x[I] == R @ x`` up to
         machine epsilon multiplied by *tol_multiplier* (or an internally
@@ -431,7 +432,7 @@ def _resample_point_pick_indices(self, to_group_index: int, ibatch_index: int,
             tol_multiplier=None: (to_group_index, ibatch_index, tol_multiplier))
     def _frozen_resample_point_pick_indices(self, actx: ArrayContext,
             to_group_index: int, ibatch_index: int,
-            tol_multiplier: Optional[float] = None):
+            tol_multiplier: float | None = None):
         result = self._resample_point_pick_indices(
             to_group_index=to_group_index,
             ibatch_index=ibatch_index,
@@ -444,7 +445,7 @@ def _frozen_resample_point_pick_indices(self, actx: ArrayContext,
     # }}}
 
     @memoize_method
-    def is_permutation(self, tol_multiplier: Optional[float] = None) -> bool:
+    def is_permutation(self, tol_multiplier: float | None = None) -> bool:
         """Return *True* if no interpolation is used in applying this connection,
         i.e. if the source unit nodes in the connection
         (cf. :class:`InterpolationBatch.result_unit_nodes`) match up
@@ -463,7 +464,7 @@ def is_permutation(self, tol_multiplier: Optional[float] = None) -> bool:
 
     def _per_target_group_pick_info(
             self, actx: ArrayContext, i_tgrp: int
-            ) -> Optional[Sequence[_FromGroupPickData]]:
+            ) -> Sequence[_FromGroupPickData] | None:
         """Returns a list of :class:`_FromGroupPickData`, one per source group
         from which data is to be transferred, or *None*, if conditions for
         this representation are not met.
@@ -487,7 +488,7 @@ def _per_target_group_pick_info(
         if not batch_source_groups:
             return None
 
-        result: List[_FromGroupPickData] = []
+        result: list[_FromGroupPickData] = []
         for source_group_index in batch_source_groups:
             batch_indices_for_this_source_group = [
                 i for i, batch in enumerate(cgrp.batches)
@@ -552,7 +553,7 @@ def _per_target_group_pick_info(
 
     def _global_point_pick_info(
             self, actx: ArrayContext
-            ) -> Sequence[Optional[Sequence[_FromGroupPickData]]]:
+            ) -> Sequence[Sequence[_FromGroupPickData] | None]:
         if self._global_point_pick_info_cache is not None:
             return self._global_point_pick_info_cache
 
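The `__post_init__` change above swaps a backslash continuation for a parenthesized annotation, so the long `|` union can wrap across lines. A small sketch of that style, with hypothetical names and assuming Python 3.10+ (annotations on attribute targets are evaluated at runtime, so the union syntax must be available):

```python
# Sketch of the parenthesized-annotation style used in __post_init__ above;
# hypothetical class and attribute names.
class BatchCache:
    def __init__(self) -> None:
        # The parentheses let the long `|` union wrap without a backslash.
        self._cache: (
            tuple[list[int], list[int]] | None) = None

    def get(self) -> tuple[list[int], list[int]]:
        # Build the cached pair lazily on first access.
        if self._cache is None:
            self._cache = ([], [])
        return self._cache
```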
5 changes: 2 additions & 3 deletions meshmode/discretization/connection/face.py
@@ -22,7 +22,6 @@
 
 import logging
 from dataclasses import dataclass
-from typing import Optional
 
 import numpy as np
 
@@ -165,7 +164,7 @@ def make_face_restriction(
         discr: Discretization,
         group_factory: ElementGroupFactory,
         boundary_tag: BoundaryTag,
-        per_face_groups: Optional[bool] = False
+        per_face_groups: bool | None = False
         ) -> DirectDiscretizationConnection:
     """Create a mesh, a discretization and a connection to restrict
    a function on *discr* to its values on the edges of element faces
@@ -383,7 +382,7 @@ def make_face_to_all_faces_embedding(
         actx: ArrayContext,
         faces_connection: DirectDiscretizationConnection,
         all_faces_discr: Discretization,
-        from_discr: Optional[Discretization] = None
+        from_discr: Discretization | None = None
         ) -> DirectDiscretizationConnection:
     """Return a
     :class:`meshmode.discretization.connection.DiscretizationConnection`
8 changes: 4 additions & 4 deletions meshmode/discretization/visualization.py
@@ -26,7 +26,7 @@
 import logging
 from dataclasses import dataclass
 from functools import singledispatch
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any
 
 import numpy as np
 
@@ -864,13 +864,13 @@ def write_vtk_file(self, file_name, names_and_fields,
     # {{{ vtkhdf
 
     def write_vtkhdf_file(self,
-            file_name: str, names_and_fields: List[Tuple[str, Any]], *,
+            file_name: str, names_and_fields: list[tuple[str, Any]], *,
            comm=None,
             use_high_order: bool = False,
             real_only: bool = False,
             overwrite: bool = False,
-            h5_file_options: Optional[Dict[str, Any]] = None,
-            dset_options: Optional[Dict[str, Any]] = None) -> None:
+            h5_file_options: dict[str, Any] | None = None,
+            dset_options: dict[str, Any] | None = None) -> None:
         """Write a VTK HDF5 file (typical extension ``'.hdf'``) containing
         the visualization fields in *names_and_fields*.
 
13 changes: 7 additions & 6 deletions meshmode/distributed.py
@@ -35,8 +35,9 @@
 THE SOFTWARE.
 """
 
+from collections.abc import Hashable, Mapping, Sequence
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Hashable, List, Mapping, Sequence, Union, cast
+from typing import TYPE_CHECKING, Any, cast
 from warnings import warn
 
 import numpy as np
@@ -158,7 +159,7 @@ def receive_mesh_part(self):
 # between two parts on the same rank.
 @dataclass
 class RemoteGroupInfo:
-    inter_part_adj_groups: List[InterPartAdjacencyGroup]
+    inter_part_adj_groups: list[InterPartAdjacencyGroup]
     vol_elem_indices: np.ndarray
     bdry_elem_indices: np.ndarray
     bdry_faces: np.ndarray
@@ -237,12 +238,12 @@ class MPIBoundaryCommSetupHelper:
     def __init__(self,
             mpi_comm: "mpi4py.MPI.Intracomm",
             actx: ArrayContext,
-            inter_rank_bdry_info: Union[
+            inter_rank_bdry_info: (
                 # new-timey
-                Sequence[InterRankBoundaryInfo],
+                Sequence[InterRankBoundaryInfo]
                 # old-timey, for compatibility
-                Mapping[int, DirectDiscretizationConnection],
-                ],
+                | Mapping[int, DirectDiscretizationConnection]
+                ),
             bdry_grp_factory: ElementGroupFactory):
         """
         :arg bdry_grp_factory: Group factory to use when creating the remote-to-local
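The `MPIBoundaryCommSetupHelper` annotation above becomes a multi-line union written with parentheses and a leading `|` on the continuation line. A hedged sketch of the same style on a hypothetical helper (not the meshmode API) that accepts either a sequence or a legacy mapping form:

```python
# Hedged sketch (hypothetical helper, not the meshmode API) of a parameter
# that accepts either the new Sequence form or the legacy Mapping form,
# written as a parenthesized multi-line union like the annotation above.
from collections.abc import Mapping, Sequence


def normalize_boundary_info(
        info: (
            Sequence[str]
            | Mapping[int, str]
        )) -> list[str]:
    # Legacy form: mapping from rank to name, ordered by rank.
    if isinstance(info, Mapping):
        return [info[rank] for rank in sorted(info)]
    return list(info)
```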
3 changes: 2 additions & 1 deletion meshmode/dof_array.py
@@ -25,10 +25,11 @@
 
 import operator as op
 import threading
+from collections.abc import Callable, Iterable
 from contextlib import contextmanager
 from functools import partial, update_wrapper
 from numbers import Number
-from typing import Any, Callable, Iterable
+from typing import Any
 from warnings import warn
 
 import numpy as np
22 changes: 8 additions & 14 deletions meshmode/mesh/__init__.py
@@ -26,20 +26,14 @@
 """
 
 from abc import ABC, abstractmethod
+from collections.abc import Callable, Collection, Hashable, Iterable, Mapping, Sequence
 from dataclasses import InitVar, dataclass, field, replace
 from typing import (
     Any,
-    Callable,
     ClassVar,
-    Collection,
-    Hashable,
-    Iterable,
     Literal,
-    Mapping,
-    Sequence,
     TypeAlias,
     TypeVar,
-    Union,
 )
 from warnings import warn
 
@@ -758,13 +752,13 @@ def as_python(self) -> str:
 
 # {{{ mesh
 
-DTypeLike = Union[np.dtype, np.generic]
-NodalAdjacencyLike = Union[
-    Literal[False], Iterable[np.ndarray], NodalAdjacency
-]
-FacialAdjacencyLike = Union[
-    Literal[False], Sequence[Sequence[FacialAdjacencyGroup]]
-]
+DTypeLike = np.dtype | np.generic
+NodalAdjacencyLike = (
+    Literal[False] | Iterable[np.ndarray] | NodalAdjacency
+)
+FacialAdjacencyLike = (
+    Literal[False] | Sequence[Sequence[FacialAdjacencyGroup]]
+)
 
 
 def check_mesh_consistency(
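The module-level aliases above move from `Union[...]` to parenthesized `|` unions, which build ordinary union objects at runtime on Python 3.10+. A short sketch with hypothetical alias names, not the meshmode ones:

```python
# Sketch of PEP 604 type aliases (hypothetical names, not the meshmode ones).
from typing import Literal, TypeAlias

import numpy as np

DTypeLikeExample: TypeAlias = np.dtype | np.generic
AdjacencyLikeExample: TypeAlias = (
    Literal[False] | list[np.ndarray] | None
)


def wants_adjacency(value: AdjacencyLikeExample) -> bool:
    # `False` explicitly disables adjacency; None means "decide later".
    return value is not False and value is not None
```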
(The remaining 7 changed files are not shown here.)
