Commit: format

rsokl committed Sep 7, 2024
1 parent 70950e0 commit 7ca263b
Showing 16 changed files with 75 additions and 232 deletions.
3 changes: 3 additions & 0 deletions .flake8
@@ -0,0 +1,3 @@
+[flake8]
+extend-ignore = F811,D1,D205,D209,D213,D400,D401,D999,D202,E203,E501,W503,E721,F403,F405,E701
+exclude = .git,__pycache__,docs,old,build,dis,tests/annotations/*, tests/test_py310.py,docs/*, src/mygrad/__init__.py, src/mygrad/numpy_compat/__init__.py,src/mygrad/nnet/__init__.py
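
These flake8 settings presumably replace a [flake8] section of the setup.cfg deleted in the next hunk. Several of the ignored codes (notably E203, E501, and W503) are the usual suppressions for black-formatted code, which matches the trailing-comma changes below. A minimal sketch (not from this commit) of why E203 is commonly ignored:

    # black inserts a space before ":" when a slice bound is a complex
    # expression; flake8's E203 ("whitespace before ':'") would flag it.
    x = list(range(10))
    upper = x[len(x) // 2 :]  # black-formatted slice that E203 would report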
130 changes: 0 additions & 130 deletions setup.cfg

This file was deleted.

2 changes: 1 addition & 1 deletion src/mygrad/indexing_routines/funcs.py
@@ -17,7 +17,7 @@ def where(
     x: ArrayLike = _NoValue,
     y: ArrayLike = _NoValue,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     where(condition, [x, y])
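
Each of the remaining hunks makes the same one-character change: a trailing comma after the final keyword-only parameter, the style black enforces for multi-line signatures (its "magic trailing comma"). For context, a minimal usage sketch of the keyword-only constant flag, assuming mygrad's documented semantics (constant=True marks the result as excluded from back-propagation):

    import mygrad as mg

    x = mg.tensor([1.0, 2.0, 3.0])
    # `constant` follows the bare `*`, so it can only be passed by name:
    out = mg.where(x > 1.5, x, 0.0, constant=True)
    # Passing it positionally -- mg.where(x > 1.5, x, 0.0, True) -- raises a TypeError.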
2 changes: 1 addition & 1 deletion src/mygrad/linalg/ops.py
@@ -264,7 +264,7 @@ def __call__(
         axis=None,
         keepdims: bool = False,
         *,
-        nan_to_num: bool = True
+        nan_to_num: bool = True,
     ):
         self.variables = (tensor,)
         self._nan_to_num = nan_to_num
14 changes: 7 additions & 7 deletions src/mygrad/math/sequential/funcs.py
@@ -36,7 +36,7 @@ def sum(
     axis: Axis = None,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Sum of tensor elements over a given axis.
@@ -124,7 +124,7 @@ def mean(
     axis: Axis = None,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Mean of tensor elements over a given axis.
@@ -202,7 +202,7 @@ def var(
     ddof: int = 0,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Compute the variance along the specified axis.
@@ -298,7 +298,7 @@ def std(
     ddof: int = 0,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Compute the standard deviation along the specified axis.
@@ -392,7 +392,7 @@ def max(
     axis: Axis = None,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Return the maximum of a tensor or maximum along its axes.
@@ -457,7 +457,7 @@ def min(
     axis: Axis = None,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Return the minimum of a tensor or minimum along its axes.
@@ -527,7 +527,7 @@ def prod(
     axis: Axis = None,
     keepdims: bool = False,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """
     Return the product of array elements over given axes.
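
For the default constant=None used throughout these signatures, a short sketch of the inferred behavior (assuming mygrad's convention that the output's constancy is inherited from its inputs):

    import mygrad as mg

    x = mg.tensor([1.0, 2.0, 3.0])
    s = mg.sum(x)                 # constant=None: s is a variable because x is
    s.backward()
    print(x.grad)                 # -> array([1., 1., 1.])

    c = mg.sum(x, constant=True)  # forced constant
    print(c.constant)             # -> True; no gradient flows through c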
4 changes: 2 additions & 2 deletions src/mygrad/nnet/activations/softmax.py
@@ -41,7 +41,7 @@ def softmax(
     x: ArrayLike,
     axis: Union[None, int, Tuple[int, ...]] = -1,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     r"""
     Applies the softmax activation function::
@@ -115,7 +115,7 @@ def logsoftmax(
     x: ArrayLike,
     axis: Union[None, int, Tuple[int, ...]] = -1,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     r"""
     Applies the log-softmax activation function::
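
Again only the trailing comma changes. For reference, a small sketch of the axis parameter these signatures expose (the import path is assumed to match mygrad's nnet.activations module):

    import mygrad as mg
    from mygrad.nnet.activations import softmax

    scores = mg.tensor([[1.0, 2.0, 3.0],
                        [1.0, 1.0, 1.0]])
    probs = softmax(scores, axis=-1)   # normalize over the last axis
    print(probs.data.sum(axis=-1))     # each row sums to 1.0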
2 changes: 1 addition & 1 deletion src/mygrad/nnet/losses/margin_ranking_loss.py
@@ -55,7 +55,7 @@ def margin_ranking_loss(
     y: ArrayLike,
     margin: float,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     r"""Computes the margin average margin ranking loss.
     Equivalent to::
2 changes: 1 addition & 1 deletion src/mygrad/nnet/losses/multiclass_hinge.py
@@ -64,7 +64,7 @@ def multiclass_hinge(
     y_true: ArrayLike,
     hinge: float = 1.0,
     *,
-    constant: Optional[bool] = None
+    constant: Optional[bool] = None,
 ) -> Tensor:
     """Computes the average multiclass hinge loss.
(Diff truncated: the remaining 8 of the 16 changed files are not shown.)
