feat: activation function for forward layer #891

Merged
45 commits
22f4648
added optional parameter for forward layer
Gerhardsa0 Jul 2, 2024
1d7f707
linter fixes
Gerhardsa0 Jul 2, 2024
0651df2
style: apply automated linter fixes
megalinter-bot Jul 2, 2024
ce55e4c
Merge branch 'main' into 889-feat-allow-activation-function-parameter…
Gerhardsa0 Jul 8, 2024
69d242c
linter fixes
Gerhardsa0 Jul 12, 2024
556445d
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 12, 2024
4bee8e1
changed docs for forward layer
Gerhardsa0 Jul 12, 2024
578236f
Merge branch 'main' into 889-feat-allow-activation-function-parameter…
Gerhardsa0 Jul 12, 2024
6b25c14
Merge branch 'main' into 889-feat-allow-activation-function-parameter…
Gerhardsa0 Jul 12, 2024
7fa3ecb
added requested changes
Gerhardsa0 Jul 15, 2024
6f55c07
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 15, 2024
bea6cc6
Merge branch 'main' of https://github.com/Safe-DS/Library into 889-fe…
Gerhardsa0 Jul 15, 2024
3bce4e9
merged
Gerhardsa0 Jul 15, 2024
f0d0ba7
Linter fixes
Gerhardsa0 Jul 15, 2024
16d1901
Linter fixes
Gerhardsa0 Jul 15, 2024
b7ac39a
removed test, because it can not get called with literal
Gerhardsa0 Jul 15, 2024
12577f3
linter fixes
Gerhardsa0 Jul 15, 2024
125fd11
linter fixes
Gerhardsa0 Jul 15, 2024
ed7abb0
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
6fa6740
linter fixes
Gerhardsa0 Jul 15, 2024
488af2c
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 15, 2024
76531d4
code adjustment
Gerhardsa0 Jul 15, 2024
671f9d6
code adjustment
Gerhardsa0 Jul 15, 2024
28692a6
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
b059967
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
7900072
fixed dark mode snapshots
Gerhardsa0 Jul 15, 2024
0233f29
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 15, 2024
b942d54
test: removed cuda test for noise as it is not deterministic on diffe…
Marsmaennchen221 Jul 15, 2024
de0ddcc
Merge branch '889-feat-allow-activation-function-parameter-for-forwar…
Marsmaennchen221 Jul 15, 2024
af48f4f
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
0d3665e
fixed tick labels
Gerhardsa0 Jul 15, 2024
0252e6b
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 15, 2024
a3f7c56
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
59f99a7
Merge branch 'main' of https://github.com/Safe-DS/Library into 889-fe…
Gerhardsa0 Jul 15, 2024
af3ab13
added test and resolved merge conflict
Gerhardsa0 Jul 15, 2024
6ec55c2
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 15, 2024
b5c3c8d
linter changes
Gerhardsa0 Jul 15, 2024
a39368b
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
d5527a6
linter changes
Gerhardsa0 Jul 15, 2024
060f0f5
Merge remote-tracking branch 'origin/889-feat-allow-activation-functi…
Gerhardsa0 Jul 15, 2024
e0eb11a
linter changes
Gerhardsa0 Jul 15, 2024
a9c3f35
linter changes
Gerhardsa0 Jul 15, 2024
8722da4
linter changes
Gerhardsa0 Jul 15, 2024
f8646c3
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
4b70549
style: apply automated linter fixes
megalinter-bot Jul 15, 2024
78 changes: 39 additions & 39 deletions docs/tutorials/time_series_forecasting.ipynb

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions mkdocs.yml
@@ -18,6 +18,7 @@ nav:
- Regression: tutorials/regression.ipynb
- Machine Learning: tutorials/machine_learning.ipynb
- Image Classification with Convolutional Neural Networks: tutorials/convolutional_neural_network_for_image_classification.ipynb
+ - Time series forecasting: tutorials/time_series_forecasting.ipynb
- API Reference: reference/
- Glossary: glossary.md
- Development:
2 changes: 1 addition & 1 deletion src/safeds/data/tabular/plotting/_table_plotter.py
@@ -572,7 +572,7 @@ def moving_average_plot(
ylabel=y_name,
)
ax.legend()
- if self._table.get_column(x_name).is_temporal:
+ if self._table.get_column(x_name).is_temporal and self._table.get_column(x_name).row_count < 9:
ax.set_xticks(x_data) # Set x-ticks to the x data points
ax.set_xticks(ax.get_xticks())
ax.set_xticklabels(
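For context, a minimal sketch of the behaviour this change touches (the table contents and the window_size value are made up for illustration; the method and parameter names follow the diff): explicit per-point x-tick labels are now only set when the x column is temporal and has fewer than 9 rows, so longer series keep matplotlib's default tick locator instead of an overcrowded axis.

from datetime import date

from safeds.data.tabular.containers import Table

table = Table({
    "date": [date(2024, 1, d) for d in range(1, 6)],  # temporal column, 5 rows (fewer than 9)
    "value": [1.0, 2.0, 3.0, 4.0, 5.0],
})

# Fewer than 9 temporal rows: every data point gets its own x-tick label.
# With 9 or more rows the default tick locator is used instead.
plot = table.plot.moving_average_plot("date", "value", window_size=2)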
31 changes: 25 additions & 6 deletions src/safeds/ml/nn/layers/_forward_layer.py
@@ -1,6 +1,6 @@
from __future__ import annotations

- from typing import TYPE_CHECKING, Any
+ from typing import TYPE_CHECKING, Any, Literal

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound
@@ -20,19 +20,28 @@ class ForwardLayer(Layer):
----------
neuron_count:
The number of neurons in this layer
+ overwrite_activation_function:
+ The activation function used in the forward layer, if not set the activation will be set automatically

Raises
------
OutOfBoundsError
If input_size < 1
If output_size < 1
+ ValueError
+ If the given activation function does not exist
"""

- def __init__(self, neuron_count: int):
+ def __init__(
+ self,
+ neuron_count: int,
+ overwrite_activation_function: Literal["sigmoid", "relu", "softmax", "none", "notset"] = "notset",
+ ):
_check_bounds("neuron_count", neuron_count, lower_bound=_ClosedBound(1))

self._input_size: int | None = None
self._output_size = neuron_count
+ self._activation_function: str = overwrite_activation_function

def _get_internal_layer(self, **kwargs: Any) -> nn.Module:
from ._internal_layers import _InternalForwardLayer # Slow import on global level
@@ -41,8 +50,10 @@ def _get_internal_layer(self, **kwargs: Any) -> nn.Module:
raise ValueError(
"The activation_function is not set. The internal layer can only be created when the activation_function is provided in the kwargs.",
)
- else:
+ elif self._activation_function == "notset":
activation_function: str = kwargs["activation_function"]
+ else:
+ activation_function = self._activation_function

if self._input_size is None:
raise ValueError("The input_size is not yet set.")
@@ -83,16 +94,24 @@ def _set_input_size(self, input_size: int | ModelImageSize) -> None:
self._input_size = input_size

def __hash__(self) -> int:
- return _structural_hash(self._input_size, self._output_size)
+ return _structural_hash(self._input_size, self._output_size, self._activation_function)

def __eq__(self, other: object) -> bool:
if not isinstance(other, ForwardLayer):
return NotImplemented
if self is other:
return True
- return self._input_size == other._input_size and self._output_size == other._output_size
+ return (
+ self._input_size == other._input_size
+ and self._output_size == other._output_size
+ and self._activation_function == other._activation_function
+ )

def __sizeof__(self) -> int:
import sys

- return sys.getsizeof(self._input_size) + sys.getsizeof(self._output_size)
+ return (
+ sys.getsizeof(self._input_size)
+ + sys.getsizeof(self._output_size)
+ + sys.getsizeof(self._activation_function)
+ )
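A short usage sketch of the new parameter (the import path is assumed from the package layout; model wiring is omitted). Any value other than "notset" set in the constructor takes precedence over the activation function the surrounding model would otherwise supply:

from safeds.ml.nn.layers import ForwardLayer

# Default "notset": the activation is still chosen automatically by the model.
auto_layer = ForwardLayer(neuron_count=10)

# Pin the activation to ReLU, regardless of what the model passes in later.
relu_layer = ForwardLayer(neuron_count=10, overwrite_activation_function="relu")

# "none" builds the layer without any activation function.
linear_layer = ForwardLayer(neuron_count=10, overwrite_activation_function="none")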
Binary files not shown (25 files).
Diff not rendered.
Diff not rendered.
3 changes: 2 additions & 1 deletion tests/safeds/data/image/containers/test_image.py
@@ -659,8 +659,8 @@ def test_should_raise(self, resource_path: str, device: Device) -> None:
image.adjust_brightness(-1)


- @pytest.mark.parametrize("device", get_devices(), ids=get_devices_ids())
class TestAddNoise:
+ @pytest.mark.parametrize("device", [device_cpu], ids=["cpu"])
@pytest.mark.parametrize(
"standard_deviation",
[
@@ -690,6 +690,7 @@ def test_should_add_noise(
assert image_noise == snapshot_png_image
_assert_width_height_channel(image, image_noise)

+ @pytest.mark.parametrize("device", get_devices(), ids=get_devices_ids())
@pytest.mark.parametrize(
"standard_deviation",
[-1],
4 changes: 3 additions & 1 deletion tests/safeds/data/image/containers/test_image_list.py
@@ -18,6 +18,7 @@

from tests.helpers import (
configure_test_with_device,
+ device_cpu,
get_devices,
get_devices_ids,
grayscale_jpg_path,
@@ -973,8 +974,8 @@ def test_all_transform_methods(
assert image_list_original == image_list_clone


- @pytest.mark.parametrize("device", get_devices(), ids=get_devices_ids())
class TestTransforms:
+ @pytest.mark.parametrize("device", [device_cpu], ids=["cpu"])
@pytest.mark.parametrize(
"resource_path",
[images_all(), [plane_png_path, plane_jpg_path] * 2],
@@ -1007,6 +1008,7 @@ def test_should_add_noise(
assert image_list_original is not image_list_clone
assert image_list_original == image_list_clone

+ @pytest.mark.parametrize("device", get_devices(), ids=get_devices_ids())
@pytest.mark.parametrize(
"channel_in",
[1, 3, 4],
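The pattern applied in both test files above, reduced to a sketch (the helpers come from the repository's tests.helpers module, as in the diff; test bodies are elided): the device parametrization moves from the class to the individual tests, so the noise snapshot comparison, which is not deterministic across devices, runs only on the CPU while the remaining tests keep the full device matrix.

import pytest

from tests.helpers import device_cpu, get_devices, get_devices_ids

class TestAddNoise:
    # Snapshot output of add_noise differs between devices, so compare only on CPU.
    @pytest.mark.parametrize("device", [device_cpu], ids=["cpu"])
    def test_should_add_noise(self, device):
        ...

    # Error handling is device-independent, so it still runs on every device.
    @pytest.mark.parametrize("device", get_devices(), ids=get_devices_ids())
    def test_should_raise(self, device):
        ...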
Diff not rendered.
30 changes: 29 additions & 1 deletion tests/safeds/ml/nn/layers/test_forward_layer.py
@@ -1,5 +1,5 @@
import sys
- from typing import Any
+ from typing import Any, Literal

import pytest
from safeds.data.image.typing import ImageSize
@@ -177,3 +177,31 @@ def test_should_assert_that_different_forward_layers_have_different_hash(
)
def test_should_assert_that_layer_size_is_greater_than_normal_object(layer: ForwardLayer) -> None:
assert sys.getsizeof(layer) > sys.getsizeof(object())


@pytest.mark.parametrize(
("activation_function", "expected_activation_function"),
[
("sigmoid", nn.Sigmoid),
("relu", nn.ReLU),
("softmax", nn.Softmax),
("none", None),
],
ids=["sigmoid", "relu", "softmax", "none"],
)
def test_should_set_activation_function(
activation_function: Literal["sigmoid", "relu", "softmax", "none"],
expected_activation_function: type | None,
) -> None:
forward_layer: ForwardLayer = ForwardLayer(1, overwrite_activation_function=activation_function)
assert forward_layer is not None
forward_layer._input_size = 1
internal_layer = forward_layer._get_internal_layer(
activation_function="relu",
)
# check if the type gets overwritten by constructor
assert (
internal_layer._fn is None
if expected_activation_function is None
else isinstance(internal_layer._fn, expected_activation_function)
)