
from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_llama import LlamaConfig


logger = logging.get_logger(__name__)


@use_kernel_forward_from_hub("RMSNorm")
class LlamaRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        LlamaRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class LlamaRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: LlamaConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32 for the frequencies
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


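# --- Illustrative example (not part of the upstream module) -----------------------------------
# A minimal sketch, assuming a small randomly initialized `LlamaConfig`, of how
# `LlamaRotaryEmbedding` and `apply_rotary_pos_emb` above fit together. The helper name
# `_rope_usage_example` is hypothetical and exists only for illustration; call it manually to run it.
def _rope_usage_example():
    config = LlamaConfig(hidden_size=64, num_attention_heads=4, num_key_value_heads=4)
    head_dim = config.hidden_size // config.num_attention_heads  # 16

    batch, seq_len = 2, 5
    q = torch.randn(batch, config.num_attention_heads, seq_len, head_dim)
    k = torch.randn(batch, config.num_key_value_heads, seq_len, head_dim)

    # cos/sin have shape (batch, seq_len, head_dim); unsqueeze_dim=1 broadcasts them over the head axis.
    rotary = LlamaRotaryEmbedding(config=config)
    position_ids = torch.arange(seq_len).unsqueeze(0).expand(batch, -1)
    cos, sin = rotary(q, position_ids)

    q_rot, k_rot = apply_rotary_pos_emb(q, k, cos, sin)
    assert q_rot.shape == q.shape and k_rot.shape == k.shape
    return q_rot, k_rot

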
class LlamaMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class LlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class LlamaDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: LlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = LlamaAttention(config=config, layer_idx=layer_idx)
        self.mlp = LlamaMLP(config)
        self.input_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class LlamaPreTrainedModel(PreTrainedModel):
    config: LlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["LlamaDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": LlamaDecoderLayer,
        "attentions": LlamaAttention,
    }


@auto_docstring
class LlamaModel(LlamaPreTrainedModel):
    def __init__(self, config: LlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = LlamaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        cache_position: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class LlamaForCausalLM(LlamaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = LlamaModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, LlamaForCausalLM

        >>> model = LlamaForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class LlamaForSequenceClassification(GenericForSequenceClassification, LlamaPreTrainedModel): ...


class LlamaForQuestionAnswering(GenericForQuestionAnswering, LlamaPreTrainedModel):
    base_model_prefix = "transformer"  # For BC, where `transformer` was used instead of `model`


class LlamaForTokenClassification(GenericForTokenClassification, LlamaPreTrainedModel): ...


__all__ = [
    "LlamaForCausalLM",
    "LlamaModel",
    "LlamaPreTrainedModel",
    "LlamaForSequenceClassification",
    "LlamaForQuestionAnswering",
    "LlamaForTokenClassification",
]