from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_layers import GenericForTokenClassification, GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_apertus import ApertusConfig


class ApertusMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        return self.down_proj(self.act_fn(self.up_proj(x)))


@use_kernel_forward_from_hub("RMSNorm")
class ApertusRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        ApertusRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class ApertusRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor  # fix linting for `register_buffer`

    def __init__(self, config: ApertusConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class ApertusAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: ApertusConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        self.q_norm = ApertusRMSNorm(self.head_dim, config.rms_norm_eps)
        self.k_norm = ApertusRMSNorm(self.head_dim, config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        query_states = self.q_norm(query_states)
        key_states = self.k_norm(key_states)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class ApertusDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: ApertusConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = ApertusAttention(config=config, layer_idx=layer_idx)
        self.mlp = ApertusMLP(config)
        self.attention_layernorm = ApertusRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.feedforward_layernorm = ApertusRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.attention_layernorm(hidden_states)
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.feedforward_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class ApertusPreTrainedModel(PreTrainedModel):
    config: ApertusConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["ApertusDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": ApertusDecoderLayer,
        "attentions": ApertusAttention,
    }


@auto_docstring
class ApertusModel(ApertusPreTrainedModel):
    def __init__(self, config: ApertusConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [ApertusDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = ApertusRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = ApertusRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class ApertusForCausalLM(ApertusPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = ApertusModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, ApertusForCausalLM

        >>> model = ApertusForCausalLM.from_pretrained("swiss-ai/Apertus-8B")
        >>> tokenizer = AutoTokenizer.from_pretrained("swiss-ai/Apertus-8B")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class ApertusForTokenClassification(GenericForTokenClassification, ApertusPreTrainedModel):
    pass


__all__ = [
    "ApertusForCausalLM",
    "ApertusModel",
    "ApertusPreTrainedModel",
    "ApertusForTokenClassification",
]