from typing import Callable, Optional, Union

import torch
from torch import nn

from transformers.utils.generic import check_model_inputs

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import (
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from .configuration_phi3 import Phi3Config

class Phi3MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.gate_up_proj = nn.Linear(config.hidden_size, 2 * config.intermediate_size, bias=False)
        self.down_proj = nn.Linear(config.intermediate_size, config.hidden_size, bias=False)
        self.activation_fn = ACT2FN[config.hidden_act]

    def forward(self, hidden_states: torch.FloatTensor) -> torch.FloatTensor:
        up_states = self.gate_up_proj(hidden_states)

        gate, up_states = up_states.chunk(2, dim=-1)
        up_states = up_states * self.activation_fn(gate)

        return self.down_proj(up_states)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
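
# Illustrative doctest-style shape check for repeat_kv (hypothetical sizes, with
# n_rep = num_attention_heads // num_key_value_heads = 2):
#
#   >>> kv = torch.zeros(1, 4, 3, 8)   # (batch, num_key_value_heads, seqlen, head_dim)
#   >>> repeat_kv(kv, 2).shape         # KV heads are duplicated, not re-projected
#   torch.Size([1, 8, 3, 8])           # (batch, num_attention_heads, seqlen, head_dim)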
|d urf|d d d d d d d |jd f }|
| }
tjj|
dtj	d
|j}
tjj|
|| jd}
t|
|	}|dd }||
fS )Nr!   r   r3   )r5   dtype)ptrainingr   )rJ   num_key_value_groupsr;   matmul	transposer>   r   Z
functionalZsoftmaxfloat32torU   rR   rW   
contiguous)rL   rM   rN   rO   rP   rQ   rR   rS   
key_statesvalue_statesattn_weightscausal_maskattn_outputr/   r/   r0   eager_attention_forwardW   s    
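
# Illustrative use of eager_attention_forward in isolation (hypothetical module and
# sizes; any object exposing `num_key_value_groups` and `training` will do):
#
#   >>> from types import SimpleNamespace
#   >>> m = SimpleNamespace(num_key_value_groups=4, training=False)
#   >>> q, kv = torch.zeros(1, 8, 5, 16), torch.zeros(1, 2, 5, 16)
#   >>> out, weights = eager_attention_forward(m, q, kv, kv, None, scaling=16**-0.5)
#   >>> out.shape, weights.shape   # output comes back as (batch, seq, heads, head_dim)
#   (torch.Size([1, 5, 8, 16]), torch.Size([1, 8, 5, 5]))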

def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)

    rotary_dim = cos.shape[-1]
    q_rot, q_pass = q[..., :rotary_dim], q[..., rotary_dim:]
    k_rot, k_pass = k[..., :rotary_dim], k[..., rotary_dim:]

    q_embed = torch.cat([(q_rot * cos) + (rotate_half(q_rot) * sin), q_pass], dim=-1)
    k_embed = torch.cat([(k_rot * cos) + (rotate_half(k_rot) * sin), k_pass], dim=-1)
    return q_embed, k_embed


class Phi3Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: Phi3Config, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.num_key_value_heads = config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        op_size = config.num_attention_heads * self.head_dim + 2 * (config.num_key_value_heads * self.head_dim)
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=False)
        self.qkv_proj = nn.Linear(config.hidden_size, op_size, bias=False)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        qkv = self.qkv_proj(hidden_states)
        query_pos = self.config.num_attention_heads * self.head_dim
        query_states = qkv[..., :query_pos]
        key_states = qkv[..., query_pos : query_pos + self.num_key_value_heads * self.head_dim]
        value_states = qkv[..., query_pos + self.num_key_value_heads * self.head_dim :]

        query_states = query_states.view(hidden_shape).transpose(1, 2)
        key_states = key_states.view(hidden_shape).transpose(1, 2)
        value_states = value_states.view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=getattr(self.config, "sliding_window", None),
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights

@use_kernel_forward_from_hub("RMSNorm")
class Phi3RMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Phi3RMSNorm is equivalent to T5LayerNorm
        N)r#   r$   r   	Parameterr;   Zonesweightvariance_epsilon)r,   r'   epsr-   r/   r0   r$      s    
zPhi3RMSNorm.__init__c                 C   sJ   |j }|tj}|djddd}|t|| j  }| j|| S )Nr!   r3   T)Zkeepdim)	rU   r\   r;   r[   powmeanZrsqrtr   r   )r,   r1   Zinput_dtypeZvariancer/   r/   r0   r7      s
    zPhi3RMSNorm.forwardc                 C   s   t | jj d| j S )Nz, eps=)r   r   r>   r   )r,   r/   r/   r0   
extra_repr   s    zPhi3RMSNorm.extra_repr)r   )r8   r9   r:   r$   r7   r   r=   r/   r/   r-   r0   r      s   r   c                       s   e Zd Zeed fddZedddddeje	ej e	ej
 e	e e	e e	ej
 e	eejejf  ee eeje	eejejf  f d
	ddZ  ZS )Phi3DecoderLayerrl   c                    st   t    |j| _t||d| _t|| _t|j|jd| _	t|j|jd| _
|| _t|j| _t|j| _d S )Nrl   r   )r#   r$   r'   rk   	self_attnr    mlpr   rms_norm_epsinput_layernormpost_attention_layernormr%   r   ZDropoutZresid_pdropresid_attn_dropoutresid_mlp_dropout)r,   r%   rm   r-   r/   r0   r$      s    

zPhi3DecoderLayer.__init__rs   rt   ru   rv   NF)	r1   rP   ri   rt   	use_cacherz   ry   rS   r2   c              
   K   sj   |}	|  |}| jf |||||||d|\}}
|	| | }|}	| |}| |}|	| | }|S )N)r1   rP   ri   rt   r   rz   ry   )r   r   r   r   r   r   )r,   r1   rP   ri   rt   r   rz   ry   rS   ZresidualZself_attn_weightsr/   r/   r0   r7      s&    




zPhi3DecoderLayer.forward)NNNFNN)r8   r9   r:   r   r   r$   r   r;   r   r   r   r	   boolr   r   r   r<   r7   r=   r/   r/   r-   r0   r      s&         r   c                   @   sL   e Zd ZU eed< dZdZdgZdgZdZ	dZ
dZdZdZeedZdZdS )	Phi3PreTrainedModelr%   modelTr   rt   )r1   
attentionsz0.0.5N)r8   r9   r:   r   __annotations__Zbase_model_prefixZsupports_gradient_checkpointingZ_no_split_modulesZ_skip_keys_device_placementZ_supports_flash_attnZ_supports_sdpaZ_supports_flex_attnZ_can_compile_fullgraphZ_supports_attention_backendr   rk   Z_can_record_outputs_versionr/   r/   r/   r0   r     s   
r   c                       sD   e Zd ZU ejed< ded fddZe e	dd Z
  ZS )	Phi3RotaryEmbeddinginv_freqNr%   c                    s   t    t|dr:t|jtr:|jd|jd| _nd| _|j| _	|j| _
|| _t| j | _| | j|\}| _| jd|dd | j| _d S )Nrope_scaling	rope_typetypedefaultr   F)
persistent)r#   r$   hasattr
isinstancer   dictgetr   Zmax_position_embeddingsZmax_seq_len_cachedZoriginal_max_seq_lenr%   r   Zrope_init_fnattention_scalingZregister_bufferr   Zoriginal_inv_freq)r,   r%   devicer   r-   r/   r0   r$   1  s    
zPhi3RotaryEmbedding.__init__c           
      C   s   | j d d d d f  |jd dd|j}|d d d d d f  }t|jjtrl|jjdkrl|jjnd}t	j
|ddV | |  dd}t	j||fdd	}| | j }| | j }	W d    n1 s0    Y  |j|jd
|	j|jd
fS )Nr   r3   r   ZmpscpuF)device_typeZenabledr!   r4   )rU   )r   floatrD   r>   r\   r   r   r   strr;   ZautocastrZ   r?   rg   r   rh   rU   )
r,   r@   ri   Zinv_freq_expandedZposition_ids_expandedr   ZfreqsZembrg   rh   r/   r/   r0   r7   B  s    0&,zPhi3RotaryEmbedding.forward)N)r8   r9   r:   r;   r   r   r   r$   Zno_gradr   r7   r=   r/   r/   r-   r0   r   .  s
   

r   c                       st   e Zd Zed fddZeedeej	 eej
 eej	 ee eej ee eej	 ee ed	ddZ  ZS )		Phi3Modelr   c                    s   t     j| _ j| _t j j| j| _t	 fddt
 jD | _t j jd| _t d| _d| _|   d S )Nc                    s   g | ]}t  |qS r/   )r   ).0rm   r   r/   r0   
<listcomp>[      z&Phi3Model.__init__.<locals>.<listcomp>r   r   F)r#   r$   Zpad_token_idZpadding_idx
vocab_sizer   Z	Embeddingr'   embed_tokensZ
ModuleListrangenum_hidden_layerslayersr   r   normr   
rotary_embZgradient_checkpointing	post_initr+   r-   r   r0   r$   T  s    zPhi3Model.__init__N)		input_idsrP   ri   rt   inputs_embedsr   rz   rS   r2   c              
   K   s  |d u |d uA rt d|d u r*| |}|rB|d u rBt| jd}|d u rz|d urZ| nd}	tj|	|	|jd  |jd}|d u r|	d}| jj
d u rtnt}
|
| j|||||d}|}| ||}| jd | jj D ]"}||f||||||d|}q| |}t||r|nd dS )	Nz:You must specify exactly one of input_ids or inputs_embedsr   r   r   )r   )r%   Zinput_embedsrP   rz   rt   ri   )rP   ri   rt   r   rz   ry   )last_hidden_statert   )
ValueErrorr   r
   r%   Zget_seq_lengthr;   Zaranger>   r   rd   r|   r   r   r   r   r   r   r   )r,   r   rP   ri   rt   r   r   rz   rS   Zpast_seen_tokensZmask_functionra   r1   ry   Zdecoder_layerr/   r/   r0   r7   d  sT    

	

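# Illustrative forward pass through the bare backbone (hypothetical tiny config):
#
#   >>> cfg = Phi3Config(vocab_size=64, hidden_size=32, intermediate_size=64, num_hidden_layers=2,
#   ...                  num_attention_heads=4, num_key_value_heads=2, pad_token_id=0)
#   >>> out = Phi3Model(cfg)(input_ids=torch.zeros(1, 6, dtype=torch.long))
#   >>> out.last_hidden_state.shape   # (batch, seq_len, hidden_size)
#   torch.Size([1, 6, 32])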
zPhi3Model.forward)NNNNNNN)r8   r9   r:   r   r$   r   r   r   r;   r   r   r	   r<   r   r   r   r   r7   r=   r/   r/   r-   r0   r   R  s*          r   c                       s   e Zd ZdgZddiZddgdgfiZ fddZeede	e
j e	e
j e	e
j e	e e	e
j e	e
j e	e e	e
j eee
jf ee ed
ddZd fdd	Z  ZS )Phi3ForCausalLMzlm_head.weightlm_headZcolwise_repr1   logitsc                    s@   t  | t|| _|j| _tj|j|jdd| _| 	  d S )NFr"   )
r#   r$   r   r   r   r   r&   r'   r   r   r+   r-   r/   r0   r$     s
    
zPhi3ForCausalLM.__init__Nr   )r   rP   ri   rt   r   labelsr   rz   logits_to_keeprS   r2   c
              
   K   s   | j f |||||||d|
}|j}t|	tr<t|	 dn|	}| |dd|ddf }d}|dur| jf ||| jjd|
}t	|||j
|j|jdS )a  
        Example:

        ```python
        >>> from transformers import AutoTokenizer, Phi3ForCausalLM

        >>> model = Phi3ForCausalLM.from_pretrained("microsoft/Phi-3-mini-4k-instruct")
        >>> tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )

    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        inputs_embeds=None,
        cache_position=None,
        position_ids=None,
        use_cache=True,
        logits_to_keep=None,
        **kwargs,
    ):
        # Overwritten -- this model may need to switch between short and long rope, invalidating the cache in the
        # process

        # When the first time input length reached long and short factor switching point, enforce re-compute cache for
        # short factor model forward
        if (
            past_key_values
            and self.config.rope_scaling
            and input_ids.shape[1] >= self.config.original_max_position_embeddings + 1
        ):
            past_length = cache_position[0]
            if past_length <= self.config.original_max_position_embeddings:
                past_key_values = None

        model_inputs = super().prepare_inputs_for_generation(
            input_ids=input_ids,
            past_key_values=past_key_values,
            attention_mask=attention_mask,
            inputs_embeds=inputs_embeds,
            cache_position=cache_position,
            position_ids=position_ids,
            use_cache=use_cache,
            logits_to_keep=logits_to_keep,
            **kwargs,
        )
        return model_inputs


class Phi3ForSequenceClassification(GenericForSequenceClassification, Phi3PreTrainedModel):
    pass


class Phi3ForTokenClassification(GenericForTokenClassification, Phi3PreTrainedModel):
    pass


__all__ = [
    "Phi3PreTrainedModel",
    "Phi3Model",
    "Phi3ForCausalLM",
    "Phi3ForSequenceClassification",
    "Phi3ForTokenClassification",
]