Closes #XYZ

### Summary of Changes

Added a GRU layer.

Co-authored-by: megalinter-bot <129584137+megalinter-bot@users.noreply.github.com>
Commit d33cb5d (1 parent: d4680d4)

Showing 5 changed files with 314 additions and 1 deletion.
@@ -0,0 +1,97 @@
from __future__ import annotations

import sys
from typing import TYPE_CHECKING, Any

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound
from safeds.ml.nn.typing import ModelImageSize

from ._layer import Layer

if TYPE_CHECKING:
    from torch import nn


class GRULayer(Layer):
    """
    A gated recurrent unit (GRU) layer.

    Parameters
    ----------
    neuron_count:
        The number of neurons in this layer.

    Raises
    ------
    OutOfBoundsError
        If neuron_count < 1.
    """

    def __init__(self, neuron_count: int):
        _check_bounds("neuron_count", neuron_count, lower_bound=_ClosedBound(1))

        self._input_size: int | None = None
        self._output_size = neuron_count

    def _get_internal_layer(self, **kwargs: Any) -> nn.Module:
        from ._internal_layers import _InternalGRULayer  # Slow import on global level

        if "activation_function" not in kwargs:
            raise ValueError(
                "The activation_function is not set. The internal layer can only be created when the activation_function is provided in the kwargs.",
            )
        activation_function: str = kwargs["activation_function"]

        if self._input_size is None:
            raise ValueError("The input_size is not yet set.")

        return _InternalGRULayer(self._input_size, self._output_size, activation_function)

    @property
    def input_size(self) -> int:
        """
        Get the input_size of this layer.

        Returns
        -------
        result:
            The number of values being passed into this layer.
        """
        if self._input_size is None:
            raise ValueError("The input_size is not yet set.")
        return self._input_size

    @property
    def output_size(self) -> int:
        """
        Get the output_size of this layer.

        Returns
        -------
        result:
            The number of neurons in this layer.
        """
        return self._output_size

    def _set_input_size(self, input_size: int | ModelImageSize) -> None:
        if isinstance(input_size, ModelImageSize):
            raise TypeError("The input_size of a GRU layer has to be of type int.")

        self._input_size = input_size

    def __hash__(self) -> int:
        return _structural_hash(
            self._input_size,
            self._output_size,
        )  # pragma: no cover

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, GRULayer):
            return NotImplemented
        return (self is other) or (self._input_size == other._input_size and self._output_size == other._output_size)

    def __sizeof__(self) -> int:
        return sys.getsizeof(self._input_size) + sys.getsizeof(self._output_size)
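For orientation, here is a minimal usage sketch of the layer added above. The underscore-prefixed members are internal to the framework and are called directly here only for illustration; in normal use the enclosing network sets the input size.

from safeds.ml.nn.layers import GRULayer

layer = GRULayer(neuron_count=4)
assert layer.output_size == 4  # fixed at construction time

layer._set_input_size(3)       # normally done by the enclosing network
assert layer.input_size == 3

# Creating the internal torch module requires the activation_function kwarg.
internal = layer._get_internal_layer(activation_function="relu")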
@@ -0,0 +1,189 @@
import sys
from typing import Any

import pytest
from safeds.data.image.typing import ImageSize
from safeds.exceptions import OutOfBoundsError
from safeds.ml.nn.layers import GRULayer
from torch import nn


@pytest.mark.parametrize(
    ("activation_function", "expected_activation_function"),
    [
        ("sigmoid", nn.Sigmoid),
        ("relu", nn.ReLU),
        ("softmax", nn.Softmax),
        ("none", None),
    ],
    ids=["sigmoid", "relu", "softmax", "none"],
)
def test_should_accept_activation_function(
    activation_function: str,
    expected_activation_function: type | None,
) -> None:
    gru_layer = GRULayer(neuron_count=1)
    gru_layer._input_size = 1
    internal_layer = gru_layer._get_internal_layer(
        activation_function=activation_function,
    )
    assert (
        internal_layer._fn is None
        if expected_activation_function is None
        else isinstance(internal_layer._fn, expected_activation_function)
    )


@pytest.mark.parametrize(
    "activation_function",
    [
        "unknown_string",
    ],
    ids=["unknown"],
)
def test_should_raise_if_unknown_activation_function_is_passed(activation_function: str) -> None:
    gru_layer = GRULayer(neuron_count=1)
    gru_layer._input_size = 1
    with pytest.raises(
        ValueError,
        match=rf"Unknown Activation Function: {activation_function}",
    ):
        gru_layer._get_internal_layer(
            activation_function=activation_function,
        )


@pytest.mark.parametrize(
    "output_size",
    [
        0,
    ],
    ids=["output_size_out_of_bounds"],
)
def test_should_raise_if_output_size_out_of_bounds(output_size: int) -> None:
    with pytest.raises(OutOfBoundsError):
        GRULayer(neuron_count=output_size)


@pytest.mark.parametrize(
    "output_size",
    [
        1,
        20,
    ],
    ids=["one", "twenty"],
)
def test_should_return_output_size(output_size: int) -> None:
    assert GRULayer(neuron_count=output_size).output_size == output_size


def test_should_raise_if_input_size_is_set_with_image_size() -> None:
    layer = GRULayer(1)
    with pytest.raises(TypeError, match=r"The input_size of a GRU layer has to be of type int."):
        layer._set_input_size(ImageSize(1, 2, 3))


def test_should_raise_if_activation_function_not_set() -> None:
    layer = GRULayer(1)
    with pytest.raises(
        ValueError,
        match=r"The activation_function is not set. The internal layer can only be created when the activation_function is provided in the kwargs.",
    ):
        layer._get_internal_layer()


@pytest.mark.parametrize(
    ("layer1", "layer2", "equal"),
    [
        (
            GRULayer(neuron_count=2),
            GRULayer(neuron_count=2),
            True,
        ),
        (
            GRULayer(neuron_count=2),
            GRULayer(neuron_count=1),
            False,
        ),
    ],
    ids=["equal", "not equal"],
)
def test_should_compare_gru_layers(layer1: GRULayer, layer2: GRULayer, equal: bool) -> None:
    assert (layer1.__eq__(layer2)) == equal


def test_should_assert_that_gru_layer_is_equal_to_itself() -> None:
    layer = GRULayer(neuron_count=1)
    assert layer.__eq__(layer)


@pytest.mark.parametrize(
    ("layer", "other"),
    [
        (GRULayer(neuron_count=1), None),
    ],
    ids=["GRULayer vs. None"],
)
def test_should_return_not_implemented_if_other_is_not_gru_layer(layer: GRULayer, other: Any) -> None:
    assert (layer.__eq__(other)) is NotImplemented


@pytest.mark.parametrize(
    ("layer1", "layer2"),
    [
        (
            GRULayer(neuron_count=2),
            GRULayer(neuron_count=2),
        ),
    ],
    ids=["equal"],
)
def test_should_assert_that_equal_gru_layers_have_equal_hash(layer1: GRULayer, layer2: GRULayer) -> None:
    assert layer1.__hash__() == layer2.__hash__()


@pytest.mark.parametrize(
    ("layer1", "layer2"),
    [
        (
            GRULayer(neuron_count=2),
            GRULayer(neuron_count=1),
        ),
    ],
    ids=["not equal"],
)
def test_should_assert_that_different_gru_layers_have_different_hash(
    layer1: GRULayer,
    layer2: GRULayer,
) -> None:
    assert layer1.__hash__() != layer2.__hash__()


@pytest.mark.parametrize(
    "layer",
    [
        GRULayer(neuron_count=1),
    ],
    ids=["one"],
)
def test_should_assert_that_layer_size_is_greater_than_normal_object(layer: GRULayer) -> None:
    assert sys.getsizeof(layer) > sys.getsizeof(object())


def test_set_input_size() -> None:
    layer = GRULayer(1)
    layer._set_input_size(3)
    assert layer.input_size == 3


def test_input_size_should_raise_error() -> None:
    layer = GRULayer(1)
    layer._input_size = None
    with pytest.raises(
        ValueError,
        match="The input_size is not yet set.",
    ):
        layer.input_size  # noqa: B018


def test_internal_layer_should_raise_error() -> None:
    layer = GRULayer(1)
    with pytest.raises(ValueError, match="The input_size is not yet set."):
        layer._get_internal_layer(activation_function="relu")
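As the hash and equality tests above exercise, __eq__ and __hash__ are structural: two layers with the same input and output sizes compare equal and hash identically, regardless of object identity. A small sketch:

from safeds.ml.nn.layers import GRULayer

a = GRULayer(neuron_count=2)
b = GRULayer(neuron_count=2)
c = GRULayer(neuron_count=1)

assert a == b and hash(a) == hash(b)  # same structure, distinct objects
assert a != c                         # different neuron_count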