from __future__ import annotations

from collections.abc import Iterable
from enum import Enum
from typing import Any

import torch.nn.functional as F
from torch import Tensor, nn

from sentence_transformers.SentenceTransformer import SentenceTransformer
from sentence_transformers.util import pairwise_cos_sim, pairwise_euclidean_sim, pairwise_manhattan_sim


class TripletDistanceMetric(Enum):
    """The metric for the triplet loss"""

    COSINE = lambda x, y: 1 - pairwise_cos_sim(x, y)
    EUCLIDEAN = lambda x, y: pairwise_euclidean_sim(x, y)
    MANHATTAN = lambda x, y: pairwise_manhattan_sim(x, y)


class TripletLoss(nn.Module):
    def __init__(
        self, model: SentenceTransformer, distance_metric=TripletDistanceMetric.EUCLIDEAN, triplet_margin: float = 5
    ) -> None:
        """
        This class implements triplet loss. Given a triplet of (anchor, positive, negative),
        the loss minimizes the distance between anchor and positive while it maximizes the distance
        between anchor and negative. It computes the following loss function:

        ``loss = max(||anchor - positive|| - ||anchor - negative|| + margin, 0)``.

        Margin is an important hyperparameter and needs to be tuned for the task at hand.
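        For instance, with ``margin = 5``, ``||anchor - positive|| = 1`` and ``||anchor - negative|| = 4``,
        the triplet still incurs a loss of ``max(1 - 4 + 5, 0) = 2``; the loss only reaches zero once the
        negative is at least ``margin`` further away from the anchor than the positive (here, ``>= 6``).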

        Args:
            model: SentenceTransformer model
            distance_metric: Function to compute distance between two
                embeddings. The class TripletDistanceMetric contains
                common distance metrics that can be used.
            triplet_margin: The negative should be at least this much
                further away from the anchor than the positive.

        References:
            - For further details, see: https://en.wikipedia.org/wiki/Triplet_loss

        Requirements:
            1. (anchor, positive, negative) triplets

        Inputs:
            +---------------------------------------+--------+
            | Texts                                 | Labels |
            +=======================================+========+
            | (anchor, positive, negative) triplets | none   |
            +---------------------------------------+--------+

        Example:
            ::

                from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses
                from datasets import Dataset

                model = SentenceTransformer("microsoft/mpnet-base")
                train_dataset = Dataset.from_dict({
                    "anchor": ["It's nice weather outside today.", "He drove to work."],
                    "positive": ["It's so sunny.", "He took the car to the office."],
                    "negative": ["It's quite rainy, sadly.", "She walked to the store."],
                })
                loss = losses.TripletLoss(model=model)

                trainer = SentenceTransformerTrainer(
                    model=model,
                    train_dataset=train_dataset,
                    loss=loss,
                )
                trainer.train()
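
            A non-default distance metric and margin can be passed in the same way, for instance
            (the values below are only illustrative):

            ::

                loss = losses.TripletLoss(
                    model=model,
                    distance_metric=losses.TripletDistanceMetric.COSINE,
                    triplet_margin=0.3,
                )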
        """
        super().__init__()
        self.model = model
        self.distance_metric = distance_metric
        self.triplet_margin = triplet_margin

    def forward(self, sentence_features: Iterable[dict[str, Tensor]], labels: Tensor) -> Tensor:
        embeddings = [self.model(sentence_feature)["sentence_embedding"] for sentence_feature in sentence_features]
        return self.compute_loss_from_embeddings(embeddings, labels)

    def compute_loss_from_embeddings(self, embeddings: list[Tensor], labels: Tensor) -> Tensor:
        """
        Compute the triplet loss from embeddings.

        Args:
            embeddings: List of embeddings in the order [anchor, positive, negative]

        Returns:
            Loss value
        """
        rep_anchor, rep_pos, rep_neg = embeddings
        distance_pos = self.distance_metric(rep_anchor, rep_pos)
        distance_neg = self.distance_metric(rep_anchor, rep_neg)

        losses = F.relu(distance_pos - distance_neg + self.triplet_margin)
        return losses.mean()

    def get_config_dict(self) -> dict[str, Any]:
        distance_metric_name = self.distance_metric.__name__
        for name, value in vars(TripletDistanceMetric).items():
            if value == self.distance_metric:
                distance_metric_name = f"TripletDistanceMetric.{name}"
                break

        return {"distance_metric": distance_metric_name, "triplet_margin": self.triplet_margin}

    @property
    def citation(self) -> str:
        return """
@misc{hermans2017defense,
    title={In Defense of the Triplet Loss for Person Re-Identification},
    author={Alexander Hermans and Lucas Beyer and Bastian Leibe},
    year={2017},
    eprint={1703.07737},
    archivePrefix={arXiv},
    primaryClass={cs.CV}
}
"""
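

# Illustrative sketch: how the ``max(d_pos - d_neg + margin, 0)`` formula used by
# ``compute_loss_from_embeddings`` behaves on toy 2-D embeddings with the COSINE
# metric defined above. The embeddings and the margin of 0.3 are made up for
# illustration only.
if __name__ == "__main__":
    import torch

    anchor = torch.tensor([[1.0, 0.0]])
    positive = torch.tensor([[0.9, 0.1]])  # nearly parallel to the anchor
    negative = torch.tensor([[0.5, 0.5]])  # still fairly similar to the anchor

    distance = TripletDistanceMetric.COSINE  # 1 - pairwise cosine similarity
    d_pos = distance(anchor, positive)
    d_neg = distance(anchor, negative)

    # The negative is not yet a full margin further from the anchor than the
    # positive, so the triplet produces a small non-zero loss.
    print(F.relu(d_pos - d_neg + 0.3).mean())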