from typing import Callable, Optional, Union

import torch
from torch import nn

from transformers.utils.generic import check_model_inputs

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import (
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from .configuration_starcoder2 import Starcoder2Config


class Starcoder2MLP(nn.Module):
    def __init__(self, config: Starcoder2Config):
        super().__init__()
        embed_dim = config.hidden_size
        self.c_fc = nn.Linear(embed_dim, config.intermediate_size, bias=config.use_bias)
        self.c_proj = nn.Linear(config.intermediate_size, embed_dim, bias=config.use_bias)
        self.act = ACT2FN[config.hidden_act]
        self.residual_dropout = config.residual_dropout

    def forward(self, hidden_states: Optional[tuple[torch.FloatTensor]]) -> torch.FloatTensor:
        hidden_states = self.c_fc(hidden_states)
        hidden_states = self.act(hidden_states)
        hidden_states = self.c_proj(hidden_states)
        hidden_states = nn.functional.dropout(hidden_states, p=self.residual_dropout, training=self.training)
        return hidden_states


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed
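
# A minimal shape sketch of the helper above (illustrative only; the tensor sizes are
# assumptions made up for the example, not values read from a real config):
#
#   q = torch.randn(1, 8, 16, 64)   # [batch, num_heads, seq_len, head_dim]
#   k = torch.randn(1, 2, 16, 64)   # [batch, num_kv_heads, seq_len, head_dim]
#   cos = torch.ones(1, 16, 64)     # [batch, seq_len, head_dim]
#   sin = torch.zeros(1, 16, 64)
#   q_rot, k_rot = apply_rotary_pos_emb(q, k, cos, sin)
#   # With cos=1 and sin=0 the rotation is the identity (q_rot == q, k_rot == k),
#   # and both outputs keep their input shapes.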
    

def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
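
# A quick equivalence check for repeat_kv, matching the claim in its docstring
# (illustrative; the shapes are assumptions for the example):
#
#   kv = torch.randn(2, 4, 10, 64)                     # [batch, num_kv_heads, seq, head_dim]
#   expanded = repeat_kv(kv, n_rep=3)                  # -> [2, 12, 10, 64]
#   reference = torch.repeat_interleave(kv, repeats=3, dim=1)
#   assert torch.equal(expanded, reference)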


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
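
# A minimal sketch of calling the eager path directly (illustrative; the dummy module and
# tensor sizes are assumptions made up for the example):
#
#   class _Dummy(nn.Module):
#       num_key_value_groups = 4                       # e.g. 8 query heads sharing 2 kv heads
#
#   q = torch.randn(1, 8, 5, 64)                       # [batch, num_heads, seq, head_dim]
#   k = torch.randn(1, 2, 5, 64)                       # [batch, num_kv_heads, seq, head_dim]
#   v = torch.randn(1, 2, 5, 64)
#   out, w = eager_attention_forward(_Dummy(), q, k, v, attention_mask=None, scaling=64**-0.5)
#   # out: [1, 5, 8, 64] (already transposed back to [batch, seq, heads, head_dim])
#   # w:   [1, 8, 5, 5]  (post-softmax attention weights)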


class Starcoder2Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: Starcoder2Config, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True
        self.q_proj = nn.Linear(config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.use_bias)
        self.k_proj = nn.Linear(config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.use_bias)
        self.v_proj = nn.Linear(config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.use_bias)
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.use_bias)
        self.residual_dropout = config.residual_dropout

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=getattr(self.config, "sliding_window", None),
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        attn_output = nn.functional.dropout(attn_output, p=self.residual_dropout, training=self.training)
        return attn_output, attn_weights
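
# Shape walk-through for the attention block above (a sketch; hidden_size=768 with
# 12 query heads and 4 key/value heads are assumed numbers, not Starcoder2 defaults):
#
#   hidden_states: [batch, seq, 768]
#   q_proj -> [batch, seq, 12 * 64] -> view + transpose -> [batch, 12, seq, 64]
#   k_proj / v_proj -> [batch, seq, 4 * 64] -> [batch, 4, seq, 64]
#
# repeat_kv (inside the attention function) then expands k/v to 12 heads, so each group
# of 12 / 4 = 3 query heads shares one key/value head (grouped-query attention).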


class Starcoder2DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Starcoder2Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Starcoder2Attention(config=config, layer_idx=layer_idx)
        self.mlp = Starcoder2MLP(config)
        self.input_layernorm = nn.LayerNorm(config.hidden_size, eps=config.norm_epsilon)
        self.post_attention_layernorm = nn.LayerNorm(config.hidden_size, eps=config.norm_epsilon)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states
  ZS )	Starcoder2RotaryEmbeddinginv_freqNr    c                    s   t    t|dr:t|jtr:|jd|jd| _nd| _|j| _	|j| _
|| _t| j | _| | j|\}| _| jd|dd | j| _d S )Nrope_scaling	rope_typetypedefaultr   F)
persistent)r#   r$   hasattr
isinstancer   dictgetr   Zmax_position_embeddingsZmax_seq_len_cachedZoriginal_max_seq_lenr!   r   Zrope_init_fnattention_scalingZregister_bufferr   Zoriginal_inv_freq)r,   r!   devicer   r-   r/   r0   r$     s    
z"Starcoder2RotaryEmbedding.__init__c           
      C   s   | j d d d d f  |jd dd|j}|d d d d d f  }t|jjtrl|jjdkrl|jjnd}t	j
|ddV | |  dd}t	j||fdd	}| | j }| | j }	W d    n1 s0    Y  |j|jd
|	j|jd
fS )Nr   r@   r   ZmpscpuF)device_typeZenabledrA   rB   )ra   )r   floatrQ   rD   re   r   r   r   strr=   Zautocastrd   rE   rL   r   rM   ra   )
r,   rF   rN   Zinv_freq_expandedZposition_ids_expandedr   ZfreqsZembrL   rM   r/   r/   r0   r8     s    0&,z!Starcoder2RotaryEmbedding.forward)N)r9   r:   r;   r=   r   __annotations__r   r$   Zno_gradr   r8   r?   r/   r/   r-   r0   r      s
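
# A minimal usage sketch for the rotary embedding module (illustrative; the config values
# are assumptions chosen so the example stays small):
#
#   config = Starcoder2Config(hidden_size=256, num_attention_heads=4)
#   rope = Starcoder2RotaryEmbedding(config=config)
#   x = torch.randn(1, 10, 256)                        # only x.dtype / x.device are used
#   position_ids = torch.arange(10).unsqueeze(0)       # [1, seq_len]
#   cos, sin = rope(x, position_ids)                   # each [1, 10, 64] = [1, seq, head_dim]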
   

@auto_docstring
class Starcoder2PreTrainedModel(PreTrainedModel):
    config: Starcoder2Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Starcoder2DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Starcoder2DecoderLayer,
        "attentions": Starcoder2Attention,
    }
eeej f  eej ee eej ee ed	ddZ  ZS )	Starcoder2Modelr    c                    s   t     j| _ j| _t j j| j| _t	 fddt
 jD | _tj j jd| _t d| _d| _ j| _|   d S )Nc                    s   g | ]}t  |qS r/   )r   ).0ro   r    r/   r0   
<listcomp>>      z,Starcoder2Model.__init__.<locals>.<listcomp>r   r    F)r#   r$   Zpad_token_idZpadding_idx
vocab_sizer   Z	Embeddingr%   embed_tokensZ
ModuleListrangenum_hidden_layerslayersr   r   normr   
rotary_embZgradient_checkpointingembedding_dropout	post_initr,   r!   r-   r    r0   r$   7  s    zStarcoder2Model.__init__N)		input_idsr]   rN   rx   inputs_embedsr   r~   r_   r2   c              
   K   s2  |d u |d uA rt d|d u r*| |}|rB|d u rBt| jd}|d u rz|d urZ| nd}	tj|	|	|jd  |jd}|d u r|	d}| jj
d u rtnt}
|
| j|||||d}|}tjj|| j| jd}| ||}| jd | jj D ]"}||f||||||d|}q| |}t||r*|nd d	S )
Nz:You must specify exactly one of input_ids or inputs_embedsr    r   r   )r   )r!   Zinput_embedsr]   r~   rx   rN   r3   )r]   rN   rx   r   r~   r}   )last_hidden_staterx   )
ValueErrorr   r
   r!   Zget_seq_lengthr=   ZarangerD   r   rI   r   r   r   r   r6   r7   r   r5   r   r   r   r   r   )r,   r   r]   rN   rx   r   r   r~   r_   Zpast_seen_tokensZmask_functionrj   r1   r}   Zdecoder_layerr/   r/   r0   r8   H  sZ    

	

zStarcoder2Model.forward)NNNNNNN)r9   r:   r;   r   r$   r   r   r=   r   r   r   r	   listr>   r   r   r   r   r8   r?   r/   r/   r-   r0   r   5  s(          r   c                       s   e Zd ZdgZddiZddgdgfiZ fddZeede	e


@auto_docstring
class Starcoder2ForCausalLM(Starcoder2PreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = Starcoder2Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, Starcoder2ForCausalLM

        >>> model = Starcoder2ForCausalLM.from_pretrained("meta-starcoder2/Starcoder2-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-starcoder2/Starcoder2-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class Starcoder2ForSequenceClassification(GenericForSequenceClassification, Starcoder2PreTrainedModel):
    pass


class Starcoder2ForTokenClassification(GenericForTokenClassification, Starcoder2PreTrainedModel):
    pass


__all__ = [
    "Starcoder2ForCausalLM",
    "Starcoder2Model",
    "Starcoder2PreTrainedModel",
    "Starcoder2ForSequenceClassification",
    "Starcoder2ForTokenClassification",
]