from ...configuration_utils import PretrainedConfig
from ..auto import CONFIG_MAPPING, AutoConfig
from ..superpoint import SuperPointConfig


class LightGlueConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`LightGlueForKeypointMatching`]. It is used to
    instantiate a LightGlue model according to the specified arguments, defining the model architecture. Instantiating a
    configuration with the defaults will yield a similar configuration to that of the LightGlue
    [ETH-CVG/lightglue_superpoint](https://huggingface.co/ETH-CVG/lightglue_superpoint) architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        keypoint_detector_config (`Union[AutoConfig, dict]`, *optional*, defaults to `SuperPointConfig`):
            The config object or dictionary of the keypoint detector.
        descriptor_dim (`int`, *optional*, defaults to 256):
            The dimension of the descriptors.
        num_hidden_layers (`int`, *optional*, defaults to 9):
            The number of self and cross attention layers.
        num_attention_heads (`int`, *optional*, defaults to 4):
            The number of heads in the multi-head attention.
        num_key_value_heads (`int`, *optional*):
            The number of key_value heads that should be used to implement Grouped Query Attention. If
            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA); if
            `num_key_value_heads=1`, the model will use Multi Query Attention (MQA); otherwise GQA is used. When
            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
            by mean-pooling all the original heads within that group. For more details, check out [this
            paper](https://huggingface.co/papers/2305.13245). If it is not specified, it will default to
            `num_attention_heads`.
        depth_confidence (`float`, *optional*, defaults to 0.95):
            The confidence threshold used to perform early stopping.
        width_confidence (`float`, *optional*, defaults to 0.99):
            The confidence threshold used to prune points.
        filter_threshold (`float`, *optional*, defaults to 0.1):
            The confidence threshold used to filter matches.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        hidden_act (`str`, *optional*, defaults to `"gelu"`):
            The activation function to be used in the hidden layers.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        attention_bias (`bool`, *optional*, defaults to `True`):
            Whether to use a bias in the query, key, value and output projection layers during self-attention.
        trust_remote_code (`bool`, *optional*, defaults to `False`):
            Whether to trust remote code when using models other than SuperPoint as the keypoint detector.

    Examples:
        ```python
        >>> from transformers import LightGlueConfig, LightGlueForKeypointMatching

        >>> # Initializing a LightGlue style configuration
        >>> configuration = LightGlueConfig()

        >>> # Initializing a model from the LightGlue style configuration
        >>> model = LightGlueForKeypointMatching(configuration)

        >>> # Accessing the model configuration
        >>> configuration = model.config
        ```
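
        A further example of passing an explicit keypoint detector configuration and enabling Multi Query Attention
        (a sketch; it assumes `SuperPointConfig` is exposed by `transformers`, as for the default detector above):

        ```python
        >>> from transformers import LightGlueConfig, SuperPointConfig

        >>> # Passing an explicit SuperPoint detector configuration and switching to Multi Query Attention
        >>> keypoint_detector_config = SuperPointConfig()
        >>> configuration = LightGlueConfig(
        ...     keypoint_detector_config=keypoint_detector_config,
        ...     num_key_value_heads=1,
        ... )
        ```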
    """

    model_type = "lightglue"
    sub_configs = {"keypoint_detector_config": AutoConfig}

    def __init__(
        self,
        keypoint_detector_config: SuperPointConfig = None,
        descriptor_dim: int = 256,
        num_hidden_layers: int = 9,
        num_attention_heads: int = 4,
        num_key_value_heads: int = None,
        depth_confidence: float = 0.95,
        width_confidence: float = 0.99,
        filter_threshold: float = 0.1,
        initializer_range: float = 0.02,
        hidden_act: str = "gelu",
        attention_dropout: float = 0.0,
        attention_bias: bool = True,
        trust_remote_code: bool = False,
        **kwargs,
    ):
        self.trust_remote_code = trust_remote_code

        # The descriptor dimension must be divisible by the number of attention heads.
        if descriptor_dim % num_attention_heads != 0:
            raise ValueError("descriptor_dim % num_heads is different from zero")

        self.descriptor_dim = descriptor_dim
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads
        self.num_key_value_heads = num_key_value_heads
        self.depth_confidence = depth_confidence
        self.width_confidence = width_confidence
        self.filter_threshold = filter_threshold
        self.initializer_range = initializer_range

        # Resolve the keypoint detector configuration: a dict is converted to a config object,
        # and `None` falls back to the default SuperPoint configuration.
        if isinstance(keypoint_detector_config, dict):
            keypoint_detector_config["model_type"] = keypoint_detector_config.get("model_type", "superpoint")
            if keypoint_detector_config["model_type"] not in CONFIG_MAPPING:
                keypoint_detector_config = AutoConfig.from_pretrained(
                    keypoint_detector_config["_name_or_path"], trust_remote_code=self.trust_remote_code
                )
            else:
                keypoint_detector_config = CONFIG_MAPPING[keypoint_detector_config["model_type"]](
                    **keypoint_detector_config, attn_implementation="eager"
                )
        if keypoint_detector_config is None:
            keypoint_detector_config = CONFIG_MAPPING["superpoint"](attn_implementation="eager")
        self.keypoint_detector_config = keypoint_detector_config

        self.hidden_size = descriptor_dim
        self.intermediate_size = descriptor_dim * 2
        self.hidden_act = hidden_act
        self.attention_dropout = attention_dropout
        self.attention_bias = attention_bias
        super().__init__(**kwargs)


__all__ = ["LightGlueConfig"]