
import math
from typing import Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache, StaticCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import _flash_attention_forward, flash_attn_supports_top_left_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_diffllama import DiffLlamaConfig


logger = logging.get_logger(__name__)


class DiffLlamaMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


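# Shape walk-through for `apply_rotary_pos_emb` as it is used in this module (comment only):
#   q, k     : [batch_size, num_heads, seq_len, head_dim]
#   cos, sin : [batch_size, seq_len, head_dim]
# With the default `unsqueeze_dim=1`, cos/sin become [batch_size, 1, seq_len, head_dim] and
# broadcast over the head dimension, so every head is rotated with the same per-position angles.

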
def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


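# Note: the eager and SDPA attention paths below call `repeat_kv` to expand the key/value heads
# to the full number of query heads before the differential split, whereas the flash-attention
# path leaves grouped-query expansion to the kernel itself.

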
def lambda_init_fn(layer_idx):
    return 0.8 - 0.6 * math.exp(-0.3 * layer_idx)


class DiffLlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: DiffLlamaConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        if layer_idx is None:
            logger.warning_once(
                f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will "
                "lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` "
                "when creating this class."
            )

        self.attention_dropout = config.attention_dropout
        self.hidden_size = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = getattr(config, "head_dim", self.hidden_size // self.num_heads)
        self.num_key_value_heads = config.num_key_value_heads
        self.num_key_value_groups = self.num_heads // self.num_key_value_heads
        self.max_position_embeddings = config.max_position_embeddings
        self.rope_theta = config.rope_theta
        self.is_causal = True

        self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.attention_bias)

        self.lambda_init = lambda_init_fn(layer_idx)
        self.lambda_q1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_q2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.groupnorm = nn.RMSNorm(2 * self.head_dim, eps=config.rms_norm_eps, elementwise_affine=False)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, target_len, _ = hidden_states.size()
        q_len = target_len

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)

        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim)

        if attention_mask is not None:  # no matter the length, we just slice it
            causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
            attn_weights = attn_weights + causal_mask

        # upcast attention to fp32
        attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype)
        attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = torch.matmul(attn_weights, value_states)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        attn_output = attn_output1 - lambda_full * attn_output2

        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)
        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.reshape(bsz, target_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, attn_weights


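# Differential attention, as implemented above (explanatory comment): two softmax attention maps
# share the query/key projections, and a scalar
#     lambda_full = exp(lambda_q1 . lambda_k1) - exp(lambda_q2 . lambda_k2) + lambda_init
# re-weights the second map.  The layer output is
#     (1 - lambda_init) * groupnorm(attn_map_1 @ V - lambda_full * attn_map_2 @ V)
# with lambda_init = 0.8 - 0.6 * exp(-0.3 * layer_idx) from `lambda_init_fn`.  Subtracting the two
# maps cancels common-mode attention noise, which is the core idea behind DiffLlama.

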
class DiffLlamaFlashAttention2(DiffLlamaAttention):
    """
    DiffLlama flash attention module. This module inherits from `DiffLlamaAttention` as the weights of the module stays
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._flash_attn_uses_top_left_mask = flash_attn_supports_top_left_mask()

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, None]:
        if isinstance(past_key_values, StaticCache):
            raise ValueError(
                "`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` "
                "make sure to use `sdpa` in the mean time, and open an issue at https://github.com/huggingface/transformers"
            )

        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        if position_embeddings is None:
            logger.warning_once(
                "The attention layers in this model are transitioning from computing the RoPE embeddings internally "
                "through `position_ids` (2D tensor with the indexes of the tokens), to using externally computed "
                "`position_embeddings` (Tuple of tensors, containing cos and sin). In v4.46 `position_ids` will be "
                "removed and `position_embeddings` will be mandatory."
            )
            cos, sin = self.rotary_emb(value_states, position_ids)
        else:
            cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # Flash attention requires the layout [batch_size, sequence_length, num_heads, head_dim]
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        dropout_rate = self.attention_dropout if self.training else 0.0

        # In PEFT, layer norms are usually cast to float32 for training stability, so the input
        # hidden states may have been silently upcast; cast them back to the expected dtype.
        input_dtype = query_states.dtype
        device_type = query_states.device.type if query_states.device.type != "mps" else "cpu"
        if input_dtype == torch.float32:
            if torch.is_autocast_enabled():
                target_dtype = (
                    torch.get_autocast_dtype(device_type)
                    if hasattr(torch, "get_autocast_dtype")
                    else torch.get_autocast_gpu_dtype()
                )
            # Handle the case where the model is quantized
            elif hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                f"The input hidden states seems to be silently casted in float32, this might be related to the fact"
                f" you have upcasted embedding or layer norm layers in float32. We will cast back the input in"
                f" {target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        value_states1, value_states2 = torch.chunk(value_states, 2, dim=2)
        value_states1 = value_states1.repeat(1, 1, 2, 1)
        value_states2 = value_states2.repeat(1, 1, 2, 1)

        attn_output1 = _flash_attention_forward(
            query_states,
            key_states,
            value_states1,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output2 = _flash_attention_forward(
            query_states,
            key_states,
            value_states2,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output = torch.cat([attn_output1, attn_output2], dim=-1)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=2)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init

        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)
        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
        attn_output = self.o_proj(attn_output)

        return attn_output, None


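# Note on `DiffLlamaFlashAttention2`: the flash kernel cannot expose attention probabilities, so
# the differential combination cannot be applied to the attention maps directly.  The value heads
# are instead split into two groups, attention is run twice with shared queries/keys, and the
# `lambda_full`-weighted subtraction is applied to the two outputs, which is mathematically
# equivalent; `attn_weights` are therefore returned as `None` on this path.

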
class DiffLlamaSdpaAttention(DiffLlamaAttention):
    """
    DiffLlama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `DiffLlamaAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to
    SDPA API.
    """

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, target_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, target_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, target_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, target_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)

        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        causal_mask = attention_mask
        if attention_mask is not None:
            causal_mask = causal_mask[:, :, :, : key_states.shape[-2]]

        # SDPA with the memory-efficient backend is currently (torch==2.1.2) bugged with non-contiguous
        # inputs and a custom attn_mask; see https://github.com/pytorch/pytorch/issues/112577.
        if query_states.device.type == "cuda" and causal_mask is not None:
            query_states = query_states.contiguous()
            key_states = key_states.contiguous()
            value_states = value_states.contiguous()

        # Dispatch to SDPA's Flash Attention or Efficient kernels via this `is_causal` statement instead of
        # an inline conditional assignment, to support both torch.compile's dynamic shapes and full graph options.
        is_causal = causal_mask is None and target_len > 1

        attn_output = torch.nn.functional.scaled_dot_product_attention(
            query_states,
            key_states,
            value_states,
            attn_mask=causal_mask,
            dropout_p=self.attention_dropout if self.training else 0.0,
            is_causal=is_causal,
        )

        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.view(bsz, target_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, None


@use_kernel_forward_from_hub("RMSNorm")
class DiffLlamaRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        DiffLlamaRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


DIFFLLAMA_ATTENTION_CLASSES = {
    "eager": DiffLlamaAttention,
    "flash_attention_2": DiffLlamaFlashAttention2,
    "sdpa": DiffLlamaSdpaAttention,
}


class DiffLlamaDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: DiffLlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size

        self.self_attn = DIFFLLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx)

        self.mlp = DiffLlamaMLP(config)
        self.input_layernorm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class DiffLlamaPreTrainedModel(PreTrainedModel):
    config: DiffLlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["DiffLlamaDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = False
    _can_compile_fullgraph = False
    _supports_attention_backend = False
    _can_record_outputs = {
        "hidden_states": DiffLlamaDecoderLayer,
        "attentions": DiffLlamaAttention,
    }

    def _init_weights(self, module):
        super()._init_weights(module)
        if isinstance(module, DiffLlamaAttention):
            module.lambda_q1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_q2.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k2.data.normal_(0, self.config.lambda_std_dev)


class DiffLlamaRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: DiffLlamaConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class DiffLlamaModel(DiffLlamaPreTrainedModel):
    def __init__(self, config: DiffLlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [DiffLlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = DiffLlamaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        cache_position: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds

        # create position embeddings to be shared across the decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


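# The causal-LM wrapper below puts an `lm_head` projection on top of `DiffLlamaModel`.  Its
# `logits_to_keep` argument (an int or an index tensor) restricts the projection to the last
# positions of interest, avoiding full-vocabulary logits for every prompt token during generation.

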
@auto_docstring
class DiffLlamaForCausalLM(DiffLlamaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = DiffLlamaModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, DiffLlamaForCausalLM

        >>> model = DiffLlamaForCausalLM.from_pretrained("google/diffllama-7b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/diffllama-7b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class DiffLlamaForSequenceClassification(GenericForSequenceClassification, DiffLlamaPreTrainedModel):
    pass


class DiffLlamaForQuestionAnswering(GenericForQuestionAnswering, DiffLlamaPreTrainedModel):
    base_model_prefix = "transformer"  # For BC, where `transformer` was used instead of `model`


class DiffLlamaForTokenClassification(GenericForTokenClassification, DiffLlamaPreTrainedModel):
    pass


__all__ = [
    "DiffLlamaPreTrainedModel",
    "DiffLlamaModel",
    "DiffLlamaForCausalLM",
    "DiffLlamaForSequenceClassification",
    "DiffLlamaForQuestionAnswering",
    "DiffLlamaForTokenClassification",
]