Input file:       standard input        Time limit:     1 s
Output file:      standard output       Memory limit:   512 MB
Maximum score:    18
You are required to implement, in Python, classes describing various activation functions:
import numpy as np


class Activation:
    """Base activation class"""

    def __init__(self):
        self._input = None

    @property
    def input(self):
        """Returns the last input received by the activation"""
        return self._input

    def __call__(self, x: np.ndarray) -> np.ndarray:
        """Computes activation output

        Arguments:
            x: Input array of shape (`batch_size`, ...)
        Returns:
            An array of the same shape as `x`"""
        raise NotImplementedError()

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        """Computes loss gradient with respect to the activation input.

        Arguments:
            gradOutput: Gradient of the loss function with respect to the activation output.
                An array of the same shape as the array received in the `__call__` method.
        Returns:
            An array of the same shape as `gradOutput`"""
        raise NotImplementedError()
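The `input` property implies a contract the statement leaves implicit: each subclass should cache its argument in `self._input` during `__call__`, so that `grad` can later compute the vector-Jacobian product for backpropagation from the stored input. A hypothetical usage sketch (the input array and the incoming gradient below are placeholder values, not part of the task):

act = ReLU()                              # any concrete subclass
y = act(np.array([[-1.0, 2.0]]))          # forward pass; the input is cached in act.input
dL_dx = act.grad(np.ones_like(y))         # backward pass: maps dL/dy to dL/dx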
class ReLU(Activation):
    """Implements ReLU activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
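One possible implementation, as a sketch rather than the reference solution, assuming the standard definition ReLU(x) = max(0, x):

class ReLU(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x                      # cache input for the backward pass
        return np.maximum(x, 0)

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        # ReLU'(x) = 1 for x > 0, 0 otherwise
        return gradOutput * (self.input > 0)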
class LeakyReLU(Activation):
    """Implements LeakyReLU activation layer"""

    def __init__(self, slope: float = 0.03):
        """Initializes LeakyReLU layer.

        Arguments:
            slope: the slope coefficient of the activation."""
        pass

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
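A sketch assuming f(x) = x for x > 0 and slope * x otherwise; note that the constructor should also call super().__init__() so that input caching keeps working:

class LeakyReLU(Activation):
    def __init__(self, slope: float = 0.03):
        super().__init__()
        self.slope = slope

    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        return np.where(x > 0, x, self.slope * x)

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        # derivative is 1 on the positive branch, slope on the negative one
        return gradOutput * np.where(self.input > 0, 1.0, self.slope)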
class GeLU(Activation):
    """Implements GeLU activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
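A sketch assuming the exact, erf-based definition GeLU(x) = x * Phi(x), where Phi is the standard normal CDF. Both the choice of the exact form (the judge may instead expect the tanh approximation) and the availability of scipy are assumptions here:

from scipy.special import erf  # assumption: scipy is available


class GeLU(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        return 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        x = self.input
        cdf = 0.5 * (1.0 + erf(x / np.sqrt(2.0)))           # Phi(x)
        pdf = np.exp(-0.5 * x * x) / np.sqrt(2.0 * np.pi)   # phi(x)
        # d/dx [x * Phi(x)] = Phi(x) + x * phi(x)
        return gradOutput * (cdf + x * pdf)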
class SiLU(Activation):
    """Implements SiLU (swish) activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
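A sketch assuming SiLU(x) = x * sigmoid(x):

class SiLU(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        return x / (1.0 + np.exp(-x))

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        s = 1.0 / (1.0 + np.exp(-self.input))
        # silu'(x) = s(x) + x * s(x) * (1 - s(x))
        return gradOutput * (s + self.input * s * (1.0 - s))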
class Softplus(Activation):
    """Implements Softplus (SmoothReLU) activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
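A sketch assuming Softplus(x) = log(1 + exp(x)); np.logaddexp gives a numerically stable form of the forward pass, and the derivative is the sigmoid:

class Softplus(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        # log(1 + exp(x)) computed stably as logaddexp(0, x)
        return np.logaddexp(0.0, x)

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        # softplus'(x) = sigmoid(x)
        return gradOutput / (1.0 + np.exp(-self.input))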
class ELU(Activation):
    """Implements ELU activation layer"""

    def __init__(self, alpha: float = 1):
        """Initializes ELU layer.

        Arguments:
            alpha: the alpha coefficient of the activation."""
        pass

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
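A sketch assuming ELU(x) = x for x > 0 and alpha * (exp(x) - 1) otherwise; clamping the argument of the exponential avoids overflow warnings on the branch that np.where still evaluates:

class ELU(Activation):
    def __init__(self, alpha: float = 1):
        super().__init__()
        self.alpha = alpha

    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        # expm1 on the clamped branch is exact near zero and never overflows
        return np.where(x > 0, x, self.alpha * np.expm1(np.minimum(x, 0)))

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        x = self.input
        # derivative is 1 on the positive branch, alpha * exp(x) on the negative one
        return gradOutput * np.where(x > 0, 1.0, self.alpha * np.exp(np.minimum(x, 0)))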
class Sigmoid(Activation):
    """Implements Sigmoid activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
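A sketch assuming sigmoid(x) = 1 / (1 + exp(-x)), with derivative s(x) * (1 - s(x)); scipy.special.expit would be a numerically safer alternative if third-party modules are permitted:

class Sigmoid(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        return 1.0 / (1.0 + np.exp(-x))

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        s = 1.0 / (1.0 + np.exp(-self.input))
        # sigmoid'(x) = s(x) * (1 - s(x))
        return gradOutput * s * (1.0 - s)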
class Tanh(Activation):
    """Implements Tanh activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
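A sketch using np.tanh, with derivative 1 - tanh(x)^2:

class Tanh(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        return np.tanh(x)

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        # tanh'(x) = 1 - tanh(x)^2
        return gradOutput * (1.0 - np.tanh(self.input) ** 2)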
class Softmax(Activation):
    """Implements Softmax activation layer"""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        """Computes Softmax activation output

        Arguments:
            x: Input array of shape (`batch_size`, `n_features`)
        Returns:
            An array of the same shape as `x`"""
        pass

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        pass
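A sketch assuming row-wise softmax with the usual max-shift for numerical stability; since softmax mixes features, the backward pass is the full Jacobian-vector product rather than an elementwise product:

class Softmax(Activation):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        self._input = x
        z = x - x.max(axis=-1, keepdims=True)   # shift by the row max for stability
        e = np.exp(z)
        return e / e.sum(axis=-1, keepdims=True)

    def grad(self, gradOutput: np.ndarray) -> np.ndarray:
        # recompute the forward output from the cached input
        z = self.input - self.input.max(axis=-1, keepdims=True)
        e = np.exp(z)
        y = e / e.sum(axis=-1, keepdims=True)
        # Jacobian-vector product: dx_i = y_i * (g_i - sum_j g_j * y_j)
        return y * (gradOutput - (gradOutput * y).sum(axis=-1, keepdims=True))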
The activation functions are tested cyclically, in the order in which they are defined in the statement: activation_idx = (test_idx − 1) mod 9. For example, tests 1 and 10 both exercise ReLU, while test 2 exercises LeakyReLU.
The solution code must contain only module imports and the definitions and implementations of the classes.