from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_llama import LlamaConfig


logger = logging.get_logger(__name__)


@use_kernel_forward_from_hub("RMSNorm")
class LlamaRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        LlamaRMSNorm is equivalent to T5LayerNorm
        N)super__init__r   	ParametertorchZonesweightvariance_epsilon)selfhidden_sizeeps	__class__ d/var/www/html/assistant/venv/lib/python3.9/site-packages/transformers/models/llama/modeling_llama.pyr#   6   s    
zLlamaRMSNorm.__init__c                 C   sJ   |j }|tj}|djddd}|t|| j  }| j|| S )N   T)Zkeepdim)	dtypetor%   float32powmeanZrsqrtr'   r&   )r(   hidden_statesZinput_dtypeZvariancer-   r-   r.   forward>   s
    zLlamaRMSNorm.forwardc                 C   s   t | jj d| j S )Nz, eps=)tupler&   shaper'   )r(   r-   r-   r.   
extra_reprE   s    zLlamaRMSNorm.extra_repr)r!   )__name__
__module____qualname__r#   r7   r:   __classcell__r-   r-   r+   r.   r    4   s   r    c                       sD   e Zd ZU ejed< ded fddZe e	dd Z
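

# Illustrative note: in effect, each token vector x is rescaled as
#   y = weight * x / sqrt(mean(x**2, dim=-1) + eps)
# computed in float32 and cast back to the input dtype; unlike LayerNorm there is no
# mean subtraction and no bias. For example (assumed values), x = [3.0, 4.0] with
# eps = 0 gives rms = sqrt((9 + 16) / 2) ≈ 3.54, so x / rms ≈ [0.85, 1.13] before the
# elementwise weight is applied.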
  ZS )	LlamaRotaryEmbeddinginv_freqNconfigc                    s   t    t|dr:t|jtr:|jd|jd| _nd| _|j| _	|j| _
|| _t| j | _| | j|\}| _| jd|dd | j| _d S )Nrope_scaling	rope_typetypedefaultr@   F)
persistent)r"   r#   hasattr
isinstancerC   dictgetrD   Zmax_position_embeddingsZmax_seq_len_cachedZoriginal_max_seq_lenrB   r   Zrope_init_fnattention_scalingZregister_bufferr@   Zoriginal_inv_freq)r(   rB   devicer@   r+   r-   r.   r#   L   s    
zLlamaRotaryEmbedding.__init__c           
      C   s   | j d d d d f  |jd dd|j}|d d d d d f  }t|jjtrl|jjdkrl|jjnd}t	j
|ddV | |  dd}t	j||fdd	}| | j }| | j }	W d    n1 s0    Y  |j|jd
|	j|jd
fS )Nr   r0   r   ZmpscpuF)device_typeZenabledr/   dim)r1   )r@   floatexpandr9   r2   rM   rI   rE   strr%   Zautocast	transposecatcosrL   sinr1   )
r(   xposition_idsZinv_freq_expandedZposition_ids_expandedrO   ZfreqsZembrW   rX   r-   r-   r.   r7   ]   s    0&,zLlamaRotaryEmbedding.forward)N)r;   r<   r=   r%   Tensor__annotations__r   r#   Zno_gradr   r7   r>   r-   r-   r+   r.   r?   I   s
   



def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed
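

# Illustrative shape sketch (assumed sizes, not taken from any config): with q and k of
# shape (batch=2, num_heads=8, seq_len=5, head_dim=64) and cos/sin of shape (2, 5, 64)
# from LlamaRotaryEmbedding, unsqueeze_dim=1 reshapes cos/sin to (2, 1, 5, 64) so they
# broadcast over the head dimension inside apply_rotary_pos_emb.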


class LlamaMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
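

# Illustrative GQA sketch (assumed sizes): with 32 attention heads and 8 key/value heads,
# n_rep = 32 // 8 = 4, so a key/value tensor of shape (2, 8, 128, 64) becomes
# (2, 32, 128, 64) after repeat_kv, matching the query layout expected by the attention
# functions below.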


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class LlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class LlamaDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = LlamaAttention(config=config, layer_idx=layer_idx)
        self.mlp = LlamaMLP(config)
        self.input_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class LlamaPreTrainedModel(PreTrainedModel):
    config: LlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["LlamaDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": LlamaDecoderLayer,
        "attentions": LlamaAttention,
    }


@auto_docstring
class LlamaModel(LlamaPreTrainedModel):
    def __init__(self, config: LlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = LlamaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class LlamaForCausalLM(LlamaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = LlamaModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, LlamaForCausalLM

        >>> model = LlamaForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class LlamaForSequenceClassification(GenericForSequenceClassification, LlamaPreTrainedModel):
    pass


class LlamaForQuestionAnswering(GenericForQuestionAnswering, LlamaPreTrainedModel):
    base_model_prefix = "transformer"  # kept for backward compatibility


class LlamaForTokenClassification(GenericForTokenClassification, LlamaPreTrainedModel):
    pass


__all__ = [
    "LlamaForCausalLM",
    "LlamaModel",
    "LlamaPreTrainedModel",
    "LlamaForSequenceClassification",
    "LlamaForQuestionAnswering",
    "LlamaForTokenClassification",
]