
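# PyTorch Cohere (Command-R) model.
#
# Architectural notes (all implemented below): each decoder layer applies attention and
# the MLP in parallel from one shared CohereLayerNorm and sums both outputs with the
# residual; RoPE rotates interleaved channel pairs rather than contiguous halves;
# attention can apply per-head Q/K layer norm (`use_qk_norm`); and the output logits are
# scaled by `config.logit_scale`.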
from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_cohere import CohereConfig


class CohereLayerNorm(nn.Module):
    def __init__(self, hidden_size=None, eps=1e-5, bias=False):
        """The hidden size can be a tuple or an int. The tuple is used for QKNorm to normalize across head_dim"""
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        mean = hidden_states.mean(-1, keepdim=True)
        variance = (hidden_states - mean).pow(2).mean(-1, keepdim=True)
        hidden_states = (hidden_states - mean) * torch.rsqrt(variance + self.variance_epsilon)
        hidden_states = self.weight.to(torch.float32) * hidden_states
        return hidden_states.to(input_dtype)


class CohereRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor  # fix linting for `register_buffer`

    def __init__(self, config: CohereConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.repeat_interleave(freqs, 2, dim=-1)  # diff from Llama: we interleave() instead of cat()
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


class CohereMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def rotate_half(x):
    # Split and rotate. Note that this function is different from e.g. Llama.
    x1 = x[..., ::2]
    x2 = x[..., 1::2]
    rot_x = torch.stack([-x2, x1], dim=-1).flatten(-2)
    return rot_x

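# Note on the layout: rotate_half pairs adjacent channels (0, 1), (2, 3), ... to match the
# interleaved cos/sin produced by torch.repeat_interleave in CohereRotaryEmbedding. For a
# head_dim-4 vector [a, b, c, d] it returns [-b, a, -d, c], whereas Llama-style models
# rotate contiguous halves and would return [-c, -d, a, b].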
def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
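    Example (illustrative only; `rotary_emb` is assumed to be a `CohereRotaryEmbedding`
    built from a config with `head_dim=64`):

        >>> q = torch.randn(1, 8, 16, 64)  # [batch_size, heads, seq_len, head_dim]
        >>> k = torch.randn(1, 8, 16, 64)
        >>> cos, sin = rotary_emb(q, torch.arange(16)[None, :])  # cos/sin: [1, 16, 64]
        >>> q_embed, k_embed = apply_rotary_pos_emb(q, k, cos, sin)  # unsqueeze_dim=1 broadcasts over heads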
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    dtype = q.dtype
    q = q.float()
    k = k.float()
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(dtype=dtype), k_embed.to(dtype=dtype)


class CohereAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: CohereConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        self.use_qk_norm = config.use_qk_norm
        if self.use_qk_norm:
            # Per-head QK-norm: the tuple hidden size makes CohereLayerNorm normalize over head_dim
            self.q_norm = CohereLayerNorm(
                hidden_size=(config.num_attention_heads, self.head_dim), eps=config.layer_norm_eps
            )
            self.k_norm = CohereLayerNorm(
                hidden_size=(config.num_key_value_heads, self.head_dim), eps=config.layer_norm_eps
            )

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape)
        key_states = self.k_proj(hidden_states).view(hidden_shape)
        value_states = self.v_proj(hidden_states).view(hidden_shape)

        if self.use_qk_norm:  # main diff from Llama
            query_states = self.q_norm(query_states)
            key_states = self.k_norm(key_states)

        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class CohereDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: CohereConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = CohereAttention(config=config, layer_idx=layer_idx)
        self.mlp = CohereMLP(config)
        self.input_layernorm = CohereLayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*):
                attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
                query_sequence_length, key_sequence_length)` if default attention is used.
            past_key_values (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
        """
        residual = hidden_states

        # Cohere's parallel-residual block: attention and the MLP both consume the same
        # normalized hidden states, and their outputs are summed with the residual.
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states_attention, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )

        # Fully Connected
        hidden_states_mlp = self.mlp(hidden_states)

        hidden_states = residual + hidden_states_attention + hidden_states_mlp

        return hidden_states


@auto_docstring
class CoherePreTrainedModel(PreTrainedModel):
    config: CohereConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["CohereDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": CohereDecoderLayer,
        "attentions": CohereAttention,
    }


@auto_docstring
class CohereModel(CoherePreTrainedModel):
    def __init__(self, config: CohereConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [CohereDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = CohereLayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)
        self.rotary_emb = CohereRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        cache_position: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds

        # create position embeddings to be shared across the decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class CohereForCausalLM(CoherePreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = CohereModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.logit_scale = config.logit_scale
        self.tie_word_embeddings = config.tie_word_embeddings
        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, CohereForCausalLM

        >>> model = CohereForCausalLM.from_pretrained("CohereForAI/c4ai-command-r-v01")
        >>> tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-v01")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )

        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])
        logits = logits * self.logit_scale  # main diff from Llama

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["CohereForCausalLM", "CohereModel", "CoherePreTrainedModel"]