
from __future__ import annotations

from collections.abc import Iterable
from enum import Enum
from typing import Any

import torch.nn.functional as F
from torch import Tensor, nn

from sentence_transformers.SentenceTransformer import SentenceTransformer


class SiameseDistanceMetric(Enum):
    """The metric for the contrastive loss"""

    EUCLIDEAN = lambda x, y: F.pairwise_distance(x, y, p=2)
    MANHATTAN = lambda x, y: F.pairwise_distance(x, y, p=1)
    COSINE_DISTANCE = lambda x, y: 1 - F.cosine_similarity(x, y)


class ContrastiveLoss(nn.Module):
    def __init__(
        self,
        model: SentenceTransformer,
        distance_metric=SiameseDistanceMetric.COSINE_DISTANCE,
        margin: float = 0.5,
        size_average: bool = True,
    ) -> None:
        """
        Contrastive loss. Expects as input two texts and a label of either 0 or 1. If the label == 1, then the distance between the
        two embeddings is reduced. If the label == 0, then the distance between the embeddings is increased.
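
        For a pair with label ``y`` and embedding distance ``d``, the loss implemented in :meth:`forward` is
        ``0.5 * (y * d**2 + (1 - y) * max(0, margin - d)**2)``: similar pairs are pulled together, while
        dissimilar pairs are pushed apart only while their distance is smaller than the margin.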

        Args:
            model: SentenceTransformer model
            distance_metric: Function that returns a distance between
                two embeddings. The class SiameseDistanceMetric contains
                pre-defined metrics that can be used
            margin: Negative samples (label == 0) should have a distance
                of at least the margin value.
            size_average: Average by the size of the mini-batch.

        References:
            * Further information: https://www.researchgate.net/publication/4246277_Dimensionality_Reduction_by_Learning_an_Invariant_Mapping
            * `Training Examples > Quora Duplicate Questions <../../../examples/sentence_transformer/training/quora_duplicate_questions/README.html>`_

        Requirements:
            1. (anchor, positive/negative) pairs

        Inputs:
            +-----------------------------------------------+------------------------------+
            | Texts                                         | Labels                       |
            +===============================================+==============================+
            | (anchor, positive/negative) pairs             | 1 if positive, 0 if negative |
            +-----------------------------------------------+------------------------------+

        Relations:
            - :class:`OnlineContrastiveLoss` is similar, but uses hard positive and hard negative pairs.
              It often yields better results.

        Example:
            ::

                from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses
                from datasets import Dataset

                model = SentenceTransformer("microsoft/mpnet-base")
                train_dataset = Dataset.from_dict({
                    "sentence1": ["It's nice weather outside today.", "He drove to work."],
                    "sentence2": ["It's so sunny.", "She walked to the store."],
                    "label": [1, 0],
                })
                loss = losses.ContrastiveLoss(model)

                trainer = SentenceTransformerTrainer(
                    model=model,
                    train_dataset=train_dataset,
                    loss=loss,
                )
                trainer.train()
N)super__init__distance_metricmarginmodelsize_average)selfr3   r1   r2   r4   	__class__s        r   r0   ContrastiveLoss.__init__   s'    v 	.
(r   c                    U R                   R                  n[        [        5      R	                  5        H  u  p#X0R                   :X  d  M  SU 3n  O   XR
                  U R                  S.$ )NzSiameseDistanceMetric.)r1   r2   r4   )r1   r!   varsr   itemsr2   r4   )r5   distance_metric_namenamevalues       r   get_config_dictContrastiveLoss.get_config_dictW   sc    #33<< 56<<>KD,,,)?v'F$ ?
 $8;;`d`q`qrrr   c                   U Vs/ sH  o0R                  U5      S   PM     nn[        U5      S:X  d   eUu  pVU R                  XV5      nSUR                  5       UR	                  S5      -  SU-
  R                  5       [
        R                  " U R                  U-
  5      R	                  S5      -  -   -  nU R                  (       a  UR                  5       $ UR                  5       $ s  snf )Nsentence_embeddingr   r-   r   )r3   lenr1   floatpowr   relur2   r4   meansum)	r5   sentence_featureslabelssentence_featurereps
rep_anchor	rep_other	distanceslossess	            r   forwardContrastiveLoss.forward`   s    [lm[lGW

+,-AB[lm4yA~~ $
((?	LLNY]]1--V0B0B0Dqvvdkk\eNeGfGjGjklGm0mm
 !% 1 1v{{}Cvzz|C ns   C&c                    g)Na~  
@inproceedings{hadsell2006dimensionality,
    author={Hadsell, R. and Chopra, S. and LeCun, Y.},
    booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
    title={Dimensionality Reduction by Learning an Invariant Mapping},
    year={2006},
    volume={2},
    number={},
    pages={1735-1742},
    doi={10.1109/CVPR.2006.100}
}
"""
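

# ---------------------------------------------------------------------------------------------
# Illustrative sketch (not part of the upstream module): evaluates the pair loss directly on
# random embeddings to show the effect of the margin. The tensor shapes, seed, and margin value
# below are arbitrary assumptions chosen for the demonstration.
if __name__ == "__main__":
    import torch

    torch.manual_seed(0)
    emb_a = torch.randn(4, 8)  # four "anchor" embeddings of dimension 8
    emb_b = torch.randn(4, 8)  # four "other" embeddings of dimension 8
    labels = torch.tensor([1.0, 0.0, 1.0, 0.0])  # 1 = similar pair, 0 = dissimilar pair
    margin = 0.5

    # Same computation as ContrastiveLoss.forward, spelled out on plain tensors.
    distances = SiameseDistanceMetric.COSINE_DISTANCE(emb_a, emb_b)
    losses = 0.5 * (labels * distances.pow(2) + (1 - labels) * F.relu(margin - distances).pow(2))
    print("distances:", distances)
    print("per-pair losses:", losses)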