"""Activation modules."""

import torch
import torch.nn as nn


class AGLU(nn.Module):
    """
    Unified activation function module from AGLU.

    This class implements a parameterized activation function with learnable parameters lambda and kappa, based on the
    AGLU (Adaptive Gated Linear Unit) approach (https://github.com/kostas1515/AGLU).

    Attributes:
        act (nn.Softplus): Softplus activation function with negative beta.
        lambd (nn.Parameter): Learnable lambda parameter initialized with uniform distribution.
        kappa (nn.Parameter): Learnable kappa parameter initialized with uniform distribution.

    Methods:
        forward: Compute the forward pass of the Unified activation function.

    Examples:
        >>> import torch
        >>> m = AGLU()
        >>> input = torch.randn(2)
        >>> output = m(input)
        >>> print(output.shape)
        torch.Size([2])
    Nreturnc              	      s\   t    tjdd| _ttjtj	d||d| _
ttjtj	d||d| _dS )zEInitialize the Unified activation function with learnable parameters.g      )beta   )devicedtypeN)super__init__nnSoftplusact	Parameterinituniform_torchemptylambdkappa)selfr   r   	__class__ U/var/www/vscode/kcb/lib/python3.10/site-packages/ultralytics/nn/modules/activation.pyr	       s   
 $zAGLU.__init__xc              	   C   s8   t j| jdd}t d| | | j| t |  S )a  
        Apply the Adaptive Gated Linear Unit (AGLU) activation function.

        This forward method implements the AGLU activation function with learnable parameters lambda and kappa.
        The function applies a transformation that adaptively combines linear and non-linear components.

        Args:
            x (torch.Tensor): Input tensor to apply the activation function to.

        Returns:
            (torch.Tensor): Output tensor after applying the AGLU activation function, with the same shape as the input.
        """
        lam = torch.clamp(self.lambd, min=0.0001)  # clamp lambda away from zero to keep 1 / lam and log(lam) finite
        return torch.exp((1 / lam) * self.act((self.kappa * x) - torch.log(lam)))
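

# --- Optional sanity-check sketch (not part of the original module) ---
# Because Softplus(beta=-1)(z) == log(sigmoid(z)), the forward pass above is equivalent to
# sigmoid(kappa * x - log(lambda)) ** (1 / lambda). The snippet below checks that equivalence
# numerically; the seed, tensor size, and tolerance are arbitrary choices for illustration.
if __name__ == "__main__":
    torch.manual_seed(0)
    m = AGLU()
    x = torch.randn(4)
    lam = torch.clamp(m.lambd, min=0.0001)
    closed_form = torch.sigmoid(m.kappa * x - torch.log(lam)) ** (1 / lam)
    print(torch.allclose(m(x), closed_form, atol=1e-6))  # expected: True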