Leaky ReLU

ReLU maps all negative inputs to $0$. Leaky ReLU instead maps negative inputs to a linear function with slope $\alpha$,

$$ \operatorname{LeakyReLU}(x) = \begin{cases} x, & \text{if } x \geq 0, \\ \alpha x, & \text{otherwise.} \end{cases} $$

Visualizations

Leaky ReLU with $\alpha=0.2$

Derivative of Leaky ReLU with $\alpha=0.2$. Notice that the derivative is $0.2$ for $x<0$.
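
The piecewise derivative follows directly from the definition above:

$$ \frac{\mathrm{d}}{\mathrm{d}x}\operatorname{LeakyReLU}(x) = \begin{cases} 1, & \text{if } x > 0, \\ \alpha, & \text{if } x < 0, \end{cases} $$

which is exactly $0.2$ on the negative axis when $\alpha=0.2$.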

Code

import torch

def leaky_relu(x, alpha):
    # Identity for positive inputs, a line with slope alpha elsewhere
    return torch.where(x > 0, x, alpha * x)
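
As a quick sanity check (a minimal sketch, assuming a recent PyTorch), this agrees with the built-in torch.nn.functional.leaky_relu, where negative_slope plays the role of $\alpha$:

import torch
import torch.nn.functional as F

x = torch.linspace(-2, 2, 5)                   # [-2, -1, 0, 1, 2]
custom = leaky_relu(x, alpha=0.2)              # tensor([-0.4000, -0.2000, 0.0000, 1.0000, 2.0000])
builtin = F.leaky_relu(x, negative_slope=0.2)
assert torch.allclose(custom, builtin)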

Full code to generate the data used in this article

from torch import nn
import matplotlib.pyplot as plt
import torch
from typing import Union, Optional
from pathlib import Path
import json


def visualize_activation(
    x: torch.Tensor, acti: torch.nn.Module,
    save_path: Optional[Union[str, Path]] = None
) -> dict:
    """Visualize activation function on the domain of x"""

    y = acti(x)

    # Calculate the grad of the activation function
    x = x.clone().requires_grad_()
    acti(x).sum().backward()
    yp = x.grad

    activation_dict = {
        "x": x.detach().numpy().tolist(),
        "y": y.detach().numpy().tolist(),
        "yp": yp.detach().numpy().tolist()
    }

    if save_path is not None:
        if isinstance(save_path, str):
            save_path = Path(save_path)
        save_path.parent.mkdir(parents=True, exist_ok=True)
        with open(save_path, "w") as f:
            json.dump(activation_dict, f, indent=4)

    return activation_dict

class LeakyReLU(nn.Module):

    def __init__(self, alpha: float = 0.2) -> None:
        super().__init__()
        self.alpha = alpha

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.where(x > 0, x, self.alpha * x)

    def __str__(self) -> str:
        return f"Activation Function: {super().__str__()}"


if __name__ == "__main__":

    lelu = LeakyReLU()

    print(lelu)

    save_path = "data/activations/leaky_relu.json"
    x = torch.linspace(-2, 2, 1000)
    data = visualize_activation(x, lelu, save_path=save_path)

    fig, ax = plt.subplots()
    ax.plot(data["x"], data["y"], label="Leaky ReLU")
    ax.plot(data["x"], data["yp"], label="Derivative")
    ax.set_title("Leaky ReLU")
    ax.legend()
    plt.show()

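Running the script prints the activation module, writes the sampled values to data/activations/leaky_relu.json, and plots the function together with its derivative.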
