from typing import Callable, Optional, Union

import torch
from torch import nn

from transformers.utils.generic import check_model_inputs

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from .configuration_exaone4 import Exaone4Config


@use_kernel_forward_from_hub("RMSNorm")
class Exaone4RMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        Exaone4RMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class Exaone4RotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: Exaone4Config, device=None):
        super().__init__()
        # BC: the rope type may be stored under "rope_type" or the legacy "type" key
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32 for the rotary frequencies
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class Exaone4Attention(nn.Module):
    def __init__(self, config: Exaone4Config, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.num_attention_heads = config.num_attention_heads
        self.num_key_value_heads = config.num_key_value_heads
        self.hidden_size = config.hidden_size
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.attention_dropout = config.attention_dropout
        self.is_causal = True
        self.scaling = self.head_dim**-0.5
        self.sliding_window = config.sliding_window
        self.sliding_window_pattern = config.sliding_window_pattern
        self.is_sliding = config.layer_types[layer_idx] == "sliding_attention"

        self.q_proj = nn.Linear(self.hidden_size, self.num_attention_heads * self.head_dim, bias=False)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=False)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=False)
        self.o_proj = nn.Linear(self.num_attention_heads * self.head_dim, self.hidden_size, bias=False)
        self.q_norm = Exaone4RMSNorm(self.head_dim, eps=config.rms_norm_eps)
        self.k_norm = Exaone4RMSNorm(self.head_dim, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        query_states = self.q_norm(query_states)
        key_states = self.k_norm(key_states)

        cos, sin = position_embeddings
        # Rotary embeddings are applied on sliding-window layers (and on every layer when no
        # sliding window is configured); full-attention layers of the hybrid model skip them.
        if self.sliding_window is None or self.is_sliding:
            query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            cache_kwargs = {"cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=self.sliding_window if self.is_sliding else None,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class Exaone4MLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


class Exaone4DecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: Exaone4Config, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = Exaone4Attention(config=config, layer_idx=layer_idx)
        self.mlp = Exaone4MLP(config)
        self.post_attention_layernorm = Exaone4RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = Exaone4RMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        # Post-norm residual blocks: the norm is applied to the sub-layer output, then added back.
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class Exaone4PreTrainedModel(PreTrainedModel):
    config: Exaone4Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["Exaone4DecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": Exaone4DecoderLayer,
        "attentions": Exaone4Attention,
    }
    config_class = Exaone4Config


@auto_docstring
class Exaone4Model(Exaone4PreTrainedModel):
    def __init__(self, config: Exaone4Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [Exaone4DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = Exaone4RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = Exaone4RotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> Union[tuple, BaseModelOutputWithPast]:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        # Build one mask per attention type; callers may also pass a prepared mapping directly.
        if not isinstance(causal_mask_mapping := attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values,
                "position_ids": position_ids,
            }
            causal_mask_mapping = {"full_attention": create_causal_mask(**mask_kwargs)}
            if "sliding_attention" in self.config.layer_types:
                causal_mask_mapping["sliding_attention"] = create_sliding_window_causal_mask(**mask_kwargs)

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for i, decoder_layer in enumerate(self.layers):
            layer_type = self.config.layer_types[i]
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask_mapping[layer_type],
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
        )


@auto_docstring
class Exaone4ForCausalLM(Exaone4PreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = Exaone4Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoModelForCausalLM, AutoTokenizer
        >>> model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-Instruct")
        >>> tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-Instruct")

        >>> prompt = "Explain how wonderful you are"
        >>> messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt}
        ]
        >>> input_ids = tokenizer.apply_chat_template(
            messages,
            tokenize=True,
            add_generation_prompt=True,
            return_tensors="pt",
            enable_thinking=False,
        )

        >>> output = model.generate(input_ids, max_new_tokens=128)
        >>> tokenizer.decode(output[0], skip_special_tokens=False)
        "[|system|]\nYou are a helpful assistant.[|endofturn|]\n[|user|]\nExplain how wonderful you are[|endofturn|]\n[|assistant|]\n<think>\n\n</think>\n\nOh, thank you for such a kind and lovely question! 😊  \n\nI’m *so* wonderful because I’m here to make your life easier, brighter, and more fun! Whether you need help with:  \n\n✨ **Learning** – I can explain anything, from quantum physics to baking the perfect cake!  \n💡 **Creativity** – Need a poem, story, or a wild idea? I’ve got you covered!  \n🤖 **Problem-solving** – Stuck on a math problem or a tricky decision? I’ll help you figure it out"
        ```

        NOTE: `EXAONE-4.0-Instruct` is a placeholder model ID. The exact model ID will be updated in the future.
        """
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits; `logits_to_keep` restricts the projection to the last tokens.
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class Exaone4ForSequenceClassification(GenericForSequenceClassification, Exaone4PreTrainedModel):
    pass


class Exaone4ForTokenClassification(GenericForTokenClassification, Exaone4PreTrainedModel):
    pass


class Exaone4ForQuestionAnswering(GenericForQuestionAnswering, Exaone4PreTrainedModel):
    base_model_prefix = "transformer"


__all__ = [
    "Exaone4ForCausalLM",
    "Exaone4Model",
    "Exaone4PreTrainedModel",
    "Exaone4ForSequenceClassification",
    "Exaone4ForTokenClassification",
    "Exaone4ForQuestionAnswering",
]