from typing import Any, Callable, Optional, Union

import torch
from torch import nn

from ...cache_utils import Cache, DynamicCache, EncoderDecoderCache
from ...configuration_utils import PretrainedConfig
from ...generation import GenerationMixin
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import (
    BaseModelOutput,
    BaseModelOutputWithPastAndCrossAttentions,
    Seq2SeqLMOutput,
    Seq2SeqModelOutput,
    SequenceClassifierOutput,
    TokenClassifierOutput,
)
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, is_torchdynamo_compiling, logging
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import OutputRecorder, check_model_inputs
from ..gemma2.configuration_gemma2 import Gemma2Config
from ..gemma2.modeling_gemma2 import (
    Gemma2Attention,
    Gemma2MLP,
    Gemma2PreTrainedModel,
    Gemma2RMSNorm,
    Gemma2RotaryEmbedding,
    create_causal_mask,
    create_sliding_window_causal_mask,
    eager_attention_forward,
)


_CHECKPOINT_FOR_DOC = "google/t5gemma-2b-2b-prefixlm-it"

logger = logging.get_logger(__name__)


class T5GemmaModuleConfig(Gemma2Config):
    pass


class T5GemmaConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`T5GemmaModel`]. It is used to instantiate a
    T5Gemma model according to the specified arguments, defining the model architecture. Instantiating a configuration
    with the defaults will yield a configuration similar to a hypothetical balanced Gemma2 encoder-decoder model,
    e.g. [google/t5gemma-2b-2b-prefixlm-it](https://huggingface.co/google/t5gemma-2b-2b-prefixlm-it).

    ```python
    >>> from transformers import T5GemmaConfig, T5GemmaModel

    >>> t5gemma_config = T5GemmaConfig.from_pretrained("google/t5gemma-2b-2b-prefixlm-it")
    >>> model = T5GemmaModel(t5gemma_config)
    ```

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        encoder (`Union[T5GemmaModuleConfig, dict]`, *optional*):
            Configuration for the encoder.
        decoder (`Union[T5GemmaModuleConfig, dict]`, *optional*):
            Configuration for the decoder.
        is_encoder_decoder (`bool`, *optional*, defaults to `True`):
            Whether the model is used as an encoder/decoder or not.
        dropout_rate (`float`, *optional*, defaults to 0.0):
            The ratio for all dropout layers (following T5).
        classifier_dropout_rate (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the classifier (following T5).
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for attention.
        tie_word_embeddings (`bool`, *optional*, defaults to `True`):
            Whether to tie input and output embeddings.
        vocab_size (`int`, *optional*, defaults to 256000):
            Vocabulary size of the T5Gemma model (the same as Gemma 2).
        kwargs (additional keyword arguments, *optional*):
            Will be passed to the PretrainedConfig base class.
    """

    model_type = "t5gemma"
    keys_to_ignore_at_inference = ["past_key_values"]
    base_model_tp_plan = {
        "encoder.layers.*.self_attn.q_proj": "colwise",
        "encoder.layers.*.self_attn.k_proj": "colwise",
        "encoder.layers.*.self_attn.v_proj": "colwise",
        "encoder.layers.*.self_attn.o_proj": "rowwise",
        "encoder.layers.*.mlp.gate_proj": "colwise",
        "encoder.layers.*.mlp.up_proj": "colwise",
        "encoder.layers.*.mlp.down_proj": "rowwise",
        "decoder.layers.*.self_attn.q_proj": "colwise",
        "decoder.layers.*.self_attn.k_proj": "colwise",
        "decoder.layers.*.self_attn.v_proj": "colwise",
        "decoder.layers.*.self_attn.o_proj": "rowwise",
        "decoder.layers.*.cross_attn.q_proj": "colwise",
        "decoder.layers.*.cross_attn.k_proj": "colwise",
        "decoder.layers.*.cross_attn.v_proj": "colwise",
        "decoder.layers.*.cross_attn.o_proj": "rowwise",
        "decoder.layers.*.mlp.gate_proj": "colwise",
        "decoder.layers.*.mlp.up_proj": "colwise",
        "decoder.layers.*.mlp.down_proj": "rowwise",
    }
    base_model_pp_plan = {
        "encoder.embed_tokens": (["input_ids"], ["inputs_embeds"]),
        "encoder.layers": (["hidden_states", "attention_mask"], ["hidden_states"]),
        "encoder.norm": (["hidden_states"], ["hidden_states"]),
        "decoder.embed_tokens": (["input_ids"], ["inputs_embeds"]),
        "decoder.layers": (["hidden_states", "attention_mask"], ["hidden_states"]),
        "decoder.norm": (["hidden_states"], ["hidden_states"]),
    }

    def __init__(
        self,
        encoder: Optional[Union[T5GemmaModuleConfig, dict[str, Any]]] = None,
        decoder: Optional[Union[T5GemmaModuleConfig, dict[str, Any]]] = None,
        is_encoder_decoder: bool = True,
        dropout_rate: float = 0.0,
        classifier_dropout_rate: float = 0.0,
        attention_dropout: float = 0.0,
        tie_word_embeddings: bool = True,
        vocab_size: int = 256000,
        **kwargs,
    ):
        if isinstance(encoder, dict):
            encoder = T5GemmaModuleConfig(**encoder)
        elif encoder is None:
            encoder = T5GemmaModuleConfig()
        else:
            assert isinstance(encoder, T5GemmaModuleConfig), f"{type(encoder)} is not supported."

        if isinstance(decoder, dict):
            decoder = T5GemmaModuleConfig(**decoder)
        elif decoder is None:
            decoder = encoder
        else:
            assert isinstance(decoder, T5GemmaModuleConfig), f"{type(decoder)} is not supported."

        # Re-instantiate so encoder and decoder configs are decoupled.
        encoder = T5GemmaModuleConfig(**encoder.to_dict())
        decoder = T5GemmaModuleConfig(**decoder.to_dict())

        encoder.is_decoder = False
        encoder.dropout_rate = dropout_rate
        encoder.attention_dropout = attention_dropout
        self.encoder = encoder

        decoder.is_decoder = True
        decoder.dropout_rate = dropout_rate
        decoder.attention_dropout = attention_dropout
        decoder.cross_attention_hidden_size = encoder.hidden_size
        self.decoder = decoder

        for special_token_key in ["bos_token_id", "pad_token_id", "eos_token_id"]:
            if special_token_key not in kwargs:
                kwargs[special_token_key] = getattr(decoder, special_token_key)

        super().__init__(**kwargs)

        self.is_encoder_decoder = is_encoder_decoder
        self.use_cache = kwargs.get("use_cache", decoder.use_cache)
        self.initializer_range = kwargs.get("initializer_range", decoder.initializer_range)

        self.dropout_rate = dropout_rate
        self.classifier_dropout_rate = classifier_dropout_rate
        self.attention_dropout = attention_dropout
        self.tie_word_embeddings = tie_word_embeddings
        self.vocab_size = vocab_size

    def __setattr__(self, key, value):
        # Keep a small set of attributes in sync with the encoder/decoder sub-configs.
        shared_attr_with_submodules = [
            "output_hidden_states",
            "output_attentions",
            "_attn_implementation",
            "dropout_rate",
            "attention_dropout",
            "vocab_size",
        ]
        if key in shared_attr_with_submodules:
            setattr(self.encoder, key, value)
            setattr(self.decoder, key, value)
        super().__setattr__(key, value)

    def get_text_config(self, *args, **kwargs):
        return self


class T5GemmaRMSNorm(Gemma2RMSNorm):
    pass


class T5GemmaMLP(Gemma2MLP):
    def __init__(self, config):
        super().__init__(config)
        self.dropout = nn.Dropout(config.dropout_rate)

    def forward(self, x):
        hidden_states = self.act_fn(self.gate_proj(x)) * self.up_proj(x)
        hidden_states = self.dropout(hidden_states)
        down_proj = self.down_proj(hidden_states)
        return down_proj


class T5GemmaRotaryEmbedding(Gemma2RotaryEmbedding):
    def __init__(self, config: T5GemmaModuleConfig, device=None):
        super().__init__(config, device)


class T5GemmaSelfAttention(Gemma2Attention):
    def __init__(self, config: T5GemmaModuleConfig, layer_idx: int):
        super().__init__(config, layer_idx)
        # Encoder self-attention is bidirectional; decoder self-attention is causal.
        self.is_causal = config.is_decoder


class T5GemmaCrossAttention(Gemma2Attention):
    def __init__(self, config: T5GemmaModuleConfig, layer_idx: int):
        super().__init__(config, layer_idx)
        # Cross-attention attends to the full encoder sequence.
        del self.sliding_window
        self.is_causal = False

        if config.cross_attention_hidden_size is None:
            raise ValueError("Cross-attention needs cross_attention_hidden_size to be specified.")
        self.k_proj = nn.Linear(
            config.cross_attention_hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.cross_attention_hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor],
        encoder_hidden_states: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        if encoder_hidden_states is None:
            raise ValueError("Encoder hidden state is required for cross attention.")

        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        if past_key_values is not None:
            is_updated = past_key_values.is_updated.get(self.layer_idx)
            curr_past_key_value = past_key_values.cross_attention_cache

        if past_key_values is None or not is_updated:
            encoder_input_shape = encoder_hidden_states.shape[:-1]
            encoder_hidden_shape = (*encoder_input_shape, -1, self.head_dim)
            key_states = self.k_proj(encoder_hidden_states).view(encoder_hidden_shape).transpose(1, 2)
            value_states = self.v_proj(encoder_hidden_states).view(encoder_hidden_shape).transpose(1, 2)
            if past_key_values is not None:
                key_states, value_states = curr_past_key_value.update(key_states, value_states, self.layer_idx)
                past_key_values.is_updated[self.layer_idx] = True
        else:
            key_states = curr_past_key_value.layers[self.layer_idx].keys
            value_states = curr_past_key_value.layers[self.layer_idx].values

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=self.attention_dropout if self.training else 0.0,
            scaling=self.scaling,
            sliding_window=None,
            softcap=self.attn_logit_softcapping,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


def bidirectional_mask_function(attention_mask: Optional[torch.Tensor]) -> Callable:
    """
    This creates a bidirectional attention mask.
    """

    def inner_mask(batch_idx: int, head_idx: int, q_idx: int, kv_idx: int) -> bool:
        if attention_mask is None:
            return torch.ones((), dtype=torch.bool)
        return attention_mask[batch_idx, kv_idx].to(torch.bool)

    return inner_mask


def sliding_window_bidirectional_mask_function(sliding_window: int) -> Callable:
    """
    This creates a bidirectional attention mask with a sliding window.
    """

    def inner_mask(batch_idx: int, head_idx: int, q_idx: int, kv_idx: int) -> bool:
        return (q_idx - sliding_window < kv_idx) & (kv_idx < q_idx + sliding_window)

    return inner_mask


class T5GemmaEncoderLayer(GradientCheckpointingLayer):
    """Encoder sub-layer."""

    def __init__(self, config: T5GemmaModuleConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.config = config
        self.layer_idx = layer_idx
        self.attention_type = config.layer_types[layer_idx]

        self.self_attn = T5GemmaSelfAttention(config=config, layer_idx=layer_idx)
        self.pre_self_attn_layernorm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_self_attn_layernorm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

        self.mlp = T5GemmaMLP(config)
        self.pre_feedforward_layernorm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_feedforward_layernorm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.dropout = nn.Dropout(config.dropout_rate)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.pre_self_attn_layernorm(hidden_states)
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=None,
            **kwargs,
        )
        hidden_states = self.post_self_attn_layernorm(hidden_states)
        hidden_states = residual + self.dropout(hidden_states)

        residual = hidden_states
        hidden_states = self.pre_feedforward_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + self.dropout(hidden_states)
        return hidden_states


class T5GemmaDecoderLayer(T5GemmaEncoderLayer):
    """Decoder sub-layer: an extra cross-attention layer."""

    def __init__(self, config: T5GemmaModuleConfig, layer_idx: int):
        super().__init__(config, layer_idx)
        self.cross_attn = T5GemmaCrossAttention(config=config, layer_idx=layer_idx)
        self.pre_cross_attn_layernorm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_cross_attn_layernorm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[EncoderDecoderCache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        **kwargs,
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.pre_self_attn_layernorm(hidden_states)
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            position_embeddings=position_embeddings,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values.self_attention_cache if past_key_values is not None else None,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )
        hidden_states = self.post_self_attn_layernorm(hidden_states)
        hidden_states = residual + self.dropout(hidden_states)

        residual = hidden_states
        hidden_states = self.pre_cross_attn_layernorm(hidden_states)
        hidden_states, _ = self.cross_attn(
            hidden_states=hidden_states,
            attention_mask=encoder_attention_mask,
            encoder_hidden_states=encoder_hidden_states,
            past_key_values=past_key_values,
            use_cache=use_cache,
            **kwargs,
        )
        hidden_states = self.post_cross_attn_layernorm(hidden_states)
        hidden_states = residual + self.dropout(hidden_states)

        residual = hidden_states
        hidden_states = self.pre_feedforward_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.post_feedforward_layernorm(hidden_states)
        hidden_states = residual + self.dropout(hidden_states)
        return hidden_states


class T5GemmaClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""

    def __init__(self, hidden_size: int, num_labels: int, classifier_dropout_rate: float = 0.0):
        super().__init__()
        self.dropout = nn.Dropout(p=classifier_dropout_rate)
        self.out_proj = nn.Linear(hidden_size, num_labels)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.out_proj(hidden_states)
        return hidden_states


class T5GemmaLMHead(nn.Module):
    """Head for language modeling (generation) tasks."""

    def __init__(self, hidden_size: int, vocab_size: int, bias: bool = False):
        super().__init__()
        self.out_proj = nn.Linear(hidden_size, vocab_size, bias=bias)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        logits = self.out_proj(hidden_states)
        return logits


@auto_docstring
class T5GemmaPreTrainedModel(Gemma2PreTrainedModel):
    config: T5GemmaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["T5GemmaBlock"]

    def _init_weights(self, module):
        PreTrainedModel._init_weights(self, module)
        std = self.config.initializer_range
        if isinstance(module, T5GemmaClassificationHead):
            scale = module.out_proj.weight.shape[0] ** -0.5
            module.out_proj.weight.data.normal_(mean=0.0, std=std * scale)
            if hasattr(module.out_proj, "bias") and module.out_proj.bias is not None:
                module.out_proj.bias.data.zero_()
        elif isinstance(module, T5GemmaLMHead):
            if not self.config.tie_word_embeddings:
                scale = module.out_proj.weight.shape[0] ** -0.5
                module.out_proj.weight.data.normal_(mean=0.0, std=std * scale)

    def _shift_right(self, input_ids):
        """
        Shifts input_ids to the right, prepends the decoder_start_token_id, and handles
        pad_token_id replacement for labels that were -100.

        This is a common preparation step for decoder inputs in sequence-to-sequence models.
        """
        decoder_start_token_id = self.config.decoder.bos_token_id
        pad_token_id = self.config.decoder.pad_token_id

        if decoder_start_token_id is None:
            raise ValueError("self.model.config.decoder.bos_token_id has to be defined.")

        shifted_input_ids = input_ids.new_zeros(input_ids.shape)
        shifted_input_ids[..., 1:] = input_ids[..., :-1].clone()
        shifted_input_ids[..., 0] = decoder_start_token_id

        if pad_token_id is None:
            raise ValueError("self.model.config.decoder.pad_token_id has to be defined.")
        # Replace possible -100 values in labels by `pad_token_id`.
        shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)

        return shifted_input_ids


def make_default_2d_attention_mask(
    token_ids: Optional[torch.Tensor],
    hidden_states: torch.Tensor,
    pad_token_id: Optional[int],
) -> torch.Tensor:
    """Construct the default attention mask."""
    if token_ids is not None:
        if pad_token_id is None:
            raise ValueError("`pad_token_id` is required for padding information.")
        attention_mask = (token_ids != pad_token_id).to(hidden_states.device, torch.long)
    else:
        attention_mask = torch.ones(
            (hidden_states.shape[0], hidden_states.shape[1]), device=hidden_states.device, dtype=torch.long
        )
    return attention_mask


class T5GemmaEncoder(T5GemmaPreTrainedModel):
    _can_record_outputs = {
        "attentions": T5GemmaSelfAttention,
        "hidden_states": T5GemmaEncoderLayer,
    }

    def __init__(self, config: T5GemmaModuleConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.norm = T5GemmaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = T5GemmaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        self.layers = nn.ModuleList(
            [T5GemmaEncoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.dropout = nn.Dropout(config.dropout_rate)
        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutput:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
        # The encoder never uses a cache; drop any stray kwarg.
        kwargs.pop("past_key_values", None)

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        cache_position = torch.arange(0, inputs_embeds.shape[1], device=inputs_embeds.device)
        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        if attention_mask is None:
            attention_mask = make_default_2d_attention_mask(input_ids, inputs_embeds, self.config.pad_token_id)

        if not isinstance(self_attn_mask_mapping := attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": None,
                "position_ids": position_ids,
            }
            self_attn_mask_mapping = {
                "full_attention": create_causal_mask(
                    **mask_kwargs,
                    or_mask_function=bidirectional_mask_function(attention_mask),
                ),
                "sliding_attention": create_sliding_window_causal_mask(
                    **mask_kwargs,
                    or_mask_function=sliding_window_bidirectional_mask_function(self.config.sliding_window),
                    and_mask_function=bidirectional_mask_function(attention_mask),
                ),
            }

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        normalizer = torch.tensor(self.config.hidden_size**0.5, dtype=hidden_states.dtype)
        hidden_states = hidden_states * normalizer
        hidden_states = self.dropout(hidden_states)

        for layer_module in self.layers[: self.config.num_hidden_layers]:
            hidden_states = layer_module(
                hidden_states,
                position_embeddings,
                self_attn_mask_mapping[layer_module.attention_type],
                position_ids,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        hidden_states = self.dropout(hidden_states)
        return BaseModelOutput(last_hidden_state=hidden_states)


class T5GemmaDecoder(T5GemmaEncoder):
    _can_record_outputs = {
        "attentions": OutputRecorder(T5GemmaSelfAttention, index=1),
        "cross_attentions": OutputRecorder(T5GemmaCrossAttention, index=1),
        "hidden_states": T5GemmaDecoderLayer,
    }

    def __init__(self, config: T5GemmaModuleConfig):
        super().__init__(config)
        self.layers = nn.ModuleList(
            [T5GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPastAndCrossAttentions:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
        if encoder_hidden_states is None:
            raise ValueError("`encoder_hidden_states` must be given in decoder")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if not self.training and use_cache and past_key_values is None:
            past_key_values = EncoderDecoderCache(DynamicCache(config=self.config), DynamicCache(config=self.config))

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )
        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        if attention_mask is None and past_key_values is None:
            attention_mask = make_default_2d_attention_mask(input_ids, inputs_embeds, self.config.pad_token_id)

        if not isinstance(self_attn_mask_mapping := attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values.self_attention_cache if past_key_values is not None else None,
                "position_ids": position_ids,
            }
            self_attn_mask_mapping = {
                "full_attention": create_causal_mask(**mask_kwargs),
                "sliding_attention": create_sliding_window_causal_mask(**mask_kwargs),
            }

        if not isinstance(cross_attn_mask_mapping := encoder_attention_mask, dict):
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": encoder_attention_mask,
                "cache_position": cache_position,
                "past_key_values": None,
                "position_ids": None,
            }
            cross_attn_mask_mapping = {
                "full_attention": create_causal_mask(
                    **mask_kwargs,
                    or_mask_function=bidirectional_mask_function(encoder_attention_mask),
                ),
            }

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        normalizer = torch.tensor(self.config.hidden_size**0.5, dtype=hidden_states.dtype)
        hidden_states = hidden_states * normalizer
        hidden_states = self.dropout(hidden_states)

        for layer_module in self.layers[: self.config.num_hidden_layers]:
            hidden_states = layer_module(
                hidden_states,
                position_embeddings,
                self_attn_mask_mapping[layer_module.attention_type],
                position_ids,
                past_key_values,
                use_cache,
                cache_position,
                encoder_hidden_states,
                cross_attn_mask_mapping["full_attention"],
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        hidden_states = self.dropout(hidden_states)
        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class T5GemmaModel(T5GemmaPreTrainedModel):
    def __init__(self, config: T5GemmaConfig):
        super().__init__(config)

        if not config.is_encoder_decoder:
            raise ValueError("T5GemmaModel only supports encoder-decoder modeling. Use `T5GemmaEncoderModel` instead.")

        self.encoder = T5GemmaEncoder(config.encoder)
        self.decoder = T5GemmaDecoder(config.decoder)

        # Initialize weights and apply final processing
        self.post_init()

    def get_encoder(self):
        return self.encoder

    def get_input_embeddings(self):
        return self.encoder.get_input_embeddings()

    def set_input_embeddings(self, new_embeddings):
        return self.encoder.set_input_embeddings(new_embeddings)

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.BoolTensor] = None,
        decoder_position_ids: Optional[torch.LongTensor] = None,
        encoder_outputs: Optional[BaseModelOutput] = None,
        past_key_values: Optional[EncoderDecoderCache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> Seq2SeqModelOutput:
        r"""
        decoder_position_ids (`torch.LongTensor` of shape `(batch_size, decoder_sequence_length)`, *optional*):
            Indices of positions of each decoder input sequence token in the position embeddings. Selected in the
            range `[0, config.decoder.n_positions - 1]`. [What are position IDs?](../glossary#position-ids)
        """
        if encoder_outputs is None:
            encoder_outputs = self.encoder(
                input_ids=input_ids,
                attention_mask=attention_mask,
                position_ids=position_ids,
                inputs_embeds=inputs_embeds,
                **kwargs,
            )

        encoder_hidden_states = encoder_outputs.last_hidden_state

        decoder_outputs = self.decoder(
            input_ids=decoder_input_ids,
            attention_mask=decoder_attention_mask,
            position_ids=decoder_position_ids,
            inputs_embeds=decoder_inputs_embeds,
            past_key_values=past_key_values,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=attention_mask,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        return Seq2SeqModelOutput(
            last_hidden_state=decoder_outputs.last_hidden_state,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.hidden_states,
            decoder_attentions=decoder_outputs.attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=encoder_outputs.last_hidden_state,
            encoder_hidden_states=encoder_outputs.hidden_states,
            encoder_attentions=encoder_outputs.attentions,
        )


@auto_docstring
class T5GemmaEncoderModel(T5GemmaPreTrainedModel):
    def __init__(self, config: T5GemmaConfig):
        super().__init__(config)

        if config.is_encoder_decoder:
            raise ValueError("T5GemmaEncoderModel only supports encoder-only modeling. Use `T5GemmaModel` instead.")

        self.encoder = T5GemmaEncoder(config.encoder)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.encoder.get_input_embeddings()

    def set_input_embeddings(self, new_embeddings):
        return self.encoder.set_input_embeddings(new_embeddings)

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutput:
        encoder_outputs = self.encoder(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            inputs_embeds=inputs_embeds,
            **kwargs,
        )
        return encoder_outputs


class T5GemmaForConditionalGeneration(T5GemmaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["model.decoder.embed_tokens.weight", "lm_head.out_proj.weight"]
    _tp_plan = {"lm_head.out_proj": "colwise_rep"}
    _pp_plan = {"lm_head.out_proj": (["hidden_states"], ["logits"])}

    def __init__(self, config: T5GemmaConfig):
        config.is_encoder_decoder = True
        super().__init__(config)
        self.model = T5GemmaModel(config)

        self.vocab_size = config.decoder.vocab_size
        self.lm_head = T5GemmaLMHead(config.decoder.hidden_size, self.vocab_size)
        self.loss_type = "ForMaskedLM"

        # Initialize weights and apply final processing
        self.post_init()

    def set_output_embeddings(self, new_embeddings):
        self.lm_head.out_proj = new_embeddings

    def get_output_embeddings(self):
        return self.lm_head.out_proj

    def _tie_weights(self):
        if self.config.tie_word_embeddings:
            self._tie_or_clone_weights(self.lm_head.out_proj, self.get_decoder().get_input_embeddings())

    def get_encoder(self):
        return self.model.encoder

    def get_decoder(self):
        return self.model.decoder

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.BoolTensor] = None,
        decoder_position_ids: Optional[torch.LongTensor] = None,
        encoder_outputs: Optional[BaseModelOutput] = None,
        past_key_values: Optional[EncoderDecoderCache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> Seq2SeqLMOutput:
        r"""
        decoder_position_ids (`torch.LongTensor` of shape `(batch_size, decoder_sequence_length)`, *optional*):
            Indices of positions of each decoder input sequence token in the position embeddings. Selected in the
            range `[0, config.decoder.n_positions - 1]`. [What are position IDs?](../glossary#position-ids)
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked); the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
        """
        if self.training and self.config._attn_implementation != "eager":
            msg = (
                "It is strongly recommended to train T5Gemma models with the `eager` attention implementation "
                f"instead of `{self.config._attn_implementation}`. Use `eager` with "
                "`AutoModelForCausalLM.from_pretrained('<path-to-checkpoint>', attn_implementation='eager')`."
            )
            if is_torchdynamo_compiling():
                raise ValueError(msg)
            else:
                logger.warning_once(msg)

        if labels is not None and decoder_input_ids is None and decoder_inputs_embeds is None:
            decoder_input_ids = self._shift_right(labels)

        decoder_outputs: Seq2SeqModelOutput = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            decoder_input_ids=decoder_input_ids,
            decoder_attention_mask=decoder_attention_mask,
            decoder_position_ids=decoder_position_ids,
            encoder_outputs=encoder_outputs,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            decoder_inputs_embeds=decoder_inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = decoder_outputs.last_hidden_state
        # Only compute necessary logits.
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        decoder_config = self.get_decoder().config
        if decoder_config.final_logit_softcapping is not None:
            logits = logits / decoder_config.final_logit_softcapping
            logits = torch.tanh(logits)
            logits = logits * decoder_config.final_logit_softcapping

        loss = None
        if labels is not None:
            loss = self.loss_function(logits, labels, self.vocab_size, **kwargs)

        return Seq2SeqLMOutput(
            loss=loss,
            logits=logits,
            past_key_values=decoder_outputs.past_key_values,
            decoder_hidden_states=decoder_outputs.decoder_hidden_states,
            decoder_attentions=decoder_outputs.decoder_attentions,
            cross_attentions=decoder_outputs.cross_attentions,
            encoder_last_hidden_state=decoder_outputs.encoder_last_hidden_state,
            encoder_hidden_states=decoder_outputs.encoder_hidden_states,
            encoder_attentions=decoder_outputs.encoder_attentions,
        )

    def prepare_decoder_input_ids_from_labels(self, labels: torch.Tensor):
        return self._shift_right(labels)


@auto_docstring
class T5GemmaForSequenceClassification(T5GemmaPreTrainedModel):
    def __init__(self, config: T5GemmaConfig, is_encoder_decoder: Optional[bool] = None):
        """
        is_encoder_decoder (`Optional`, *optional*):
            Whether to use the encoder-decoder architecture for sequence classification. When set to `False`, only
            the encoder is used.
        """
        if is_encoder_decoder is not None:
            config.is_encoder_decoder = is_encoder_decoder
        super().__init__(config)
        self.num_labels = config.num_labels

        if config.is_encoder_decoder:
            self.model = T5GemmaModel(config)
        else:
            self.model = T5GemmaEncoderModel(config)

        hidden_size = config.encoder.hidden_size
        if config.is_encoder_decoder:
            hidden_size = config.decoder.hidden_size

        classifier_dropout = getattr(config, "classifier_dropout_rate", 0.1)
        self.score = T5GemmaClassificationHead(hidden_size, self.num_labels, classifier_dropout)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.model.get_input_embeddings()

    def set_input_embeddings(self, value):
        self.model.set_input_embeddings(value)

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.LongTensor] = None,
        decoder_position_ids: Optional[torch.LongTensor] = None,
        encoder_outputs: Optional[BaseModelOutput] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> SequenceClassifierOutput:
        r"""
        decoder_position_ids (`torch.LongTensor` of shape `(batch_size, decoder_sequence_length)`, *optional*):
            Indices of positions of each decoder input sequence token in the position embeddings. Selected in the
            range `[0, config.decoder.n_positions - 1]`. [What are position IDs?](../glossary#position-ids)
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss); if
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        if self.config.is_encoder_decoder and (input_ids is None and inputs_embeds is not None):
            raise NotImplementedError(
                f"Passing input embeddings is currently not supported for {self.__class__.__name__} in encoder-decoder mode."
            )

        if self.config.is_encoder_decoder and decoder_input_ids is None and decoder_inputs_embeds is None:
            if input_ids is None:
                raise ValueError(
                    "If no `decoder_input_ids` or `decoder_inputs_embeds` are passed, `input_ids` cannot be `None`. "
                    "Please pass either `input_ids` or `decoder_input_ids` or `decoder_inputs_embeds`."
                )
            decoder_input_ids = self._shift_right(input_ids)

        if self.config.is_encoder_decoder:
            outputs: Seq2SeqModelOutput = self.model(
                input_ids,
                attention_mask=attention_mask,
                position_ids=position_ids,
                decoder_input_ids=decoder_input_ids,
                decoder_attention_mask=decoder_attention_mask,
                decoder_position_ids=decoder_position_ids,
                encoder_outputs=encoder_outputs,
                inputs_embeds=inputs_embeds,
                decoder_inputs_embeds=decoder_inputs_embeds,
                use_cache=False,
                **kwargs,
            )
            last_hidden_state = outputs.last_hidden_state
            hidden_states = outputs.decoder_hidden_states
            attentions = outputs.decoder_attentions
        else:
            outputs: BaseModelOutput = self.model(
                input_ids,
                attention_mask=attention_mask,
                position_ids=position_ids,
                inputs_embeds=inputs_embeds,
                **kwargs,
            )
            last_hidden_state = outputs.last_hidden_state
            hidden_states = outputs.hidden_states
            attentions = outputs.attentions

        logits = self.score(last_hidden_state)

        if input_ids is not None:
            batch_size = input_ids.shape[0]
        else:
            batch_size = inputs_embeds.shape[0]

        if self.config.pad_token_id is None and batch_size != 1:
            raise ValueError("Cannot handle batch sizes > 1 if no padding token is defined.")
        if self.config.pad_token_id is None:
            last_non_pad_token = -1
        elif input_ids is not None:
            # To handle both left- and right-padding, take the rightmost token that is not equal to pad_token_id.
            non_pad_mask = (input_ids != self.config.pad_token_id).to(logits.device, torch.int32)
            token_indices = torch.arange(input_ids.shape[-1], device=logits.device, dtype=torch.int32)
            last_non_pad_token = (token_indices * non_pad_mask).argmax(-1)
            if self.config.is_encoder_decoder:
                # Decoder inputs are shifted right by one, so the pooled position moves by one as well.
                last_non_pad_token += 1
                last_non_pad_token = torch.clamp(last_non_pad_token, max=decoder_input_ids.shape[-1] - 1)
        else:
            last_non_pad_token = -1
            logger.warning_once(
                f"{self.__class__.__name__} will not detect padding tokens in `inputs_embeds`. Results may be "
                "unexpected if using padding tokens in conjunction with `inputs_embeds.`"
            )

        pooled_logits = logits[torch.arange(batch_size, device=logits.device), last_non_pad_token]

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, pooled_logits=pooled_logits, config=self.config)

        return SequenceClassifierOutput(
            loss=loss,
            logits=pooled_logits,
            hidden_states=hidden_states,
            attentions=attentions,
        )


@auto_docstring
class T5GemmaForTokenClassification(T5GemmaPreTrainedModel):
    def __init__(self, config: T5GemmaConfig, is_encoder_decoder: Optional[bool] = None):
        """
        is_encoder_decoder (`Optional`, *optional*):
            Whether to use the encoder-decoder architecture for token classification. When set to `False`, only the
            encoder is used.
        """
        if is_encoder_decoder is not None:
            config.is_encoder_decoder = is_encoder_decoder
        super().__init__(config)
        self.num_labels = config.num_labels

        if config.is_encoder_decoder:
            self.model = T5GemmaModel(config)
        else:
            self.model = T5GemmaEncoderModel(config)

        hidden_size = config.encoder.hidden_size
        if config.is_encoder_decoder:
            hidden_size = config.decoder.hidden_size

        classifier_dropout = getattr(config, "classifier_dropout_rate", 0.1)
        self.score = T5GemmaClassificationHead(hidden_size, self.num_labels, classifier_dropout)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.model.get_input_embeddings()

    def set_input_embeddings(self, value):
        self.model.set_input_embeddings(value)

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        decoder_input_ids: Optional[torch.LongTensor] = None,
        decoder_attention_mask: Optional[torch.LongTensor] = None,
        decoder_position_ids: Optional[torch.LongTensor] = None,
        encoder_outputs: Optional[BaseModelOutput] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> TokenClassifierOutput:
        r"""
        decoder_position_ids (`torch.LongTensor` of shape `(batch_size, decoder_sequence_length)`, *optional*):
            Indices of positions of each decoder input sequence token in the position embeddings. Selected in the
            range `[0, config.decoder.n_positions - 1]`. [What are position IDs?](../glossary#position-ids)
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
        """
        if self.config.is_encoder_decoder and (input_ids is None and inputs_embeds is not None):
            raise NotImplementedError(
                f"Passing input embeddings is currently not supported for {self.__class__.__name__} in encoder-decoder mode."
            )

        if self.config.is_encoder_decoder and decoder_input_ids is None and decoder_inputs_embeds is None:
            if input_ids is None:
                raise ValueError(
                    "If no `decoder_input_ids` or `decoder_inputs_embeds` are passed, `input_ids` cannot be `None`. "
                    "Please pass either `input_ids` or `decoder_input_ids` or `decoder_inputs_embeds`."
                )
            decoder_input_ids = self._shift_right(input_ids)

        if self.config.is_encoder_decoder:
            outputs: Seq2SeqModelOutput = self.model(
                input_ids,
                attention_mask=attention_mask,
                position_ids=position_ids,
                decoder_input_ids=decoder_input_ids,
                decoder_attention_mask=decoder_attention_mask,
                decoder_position_ids=decoder_position_ids,
                encoder_outputs=encoder_outputs,
                inputs_embeds=inputs_embeds,
                decoder_inputs_embeds=decoder_inputs_embeds,
                use_cache=False,
                **kwargs,
            )
            last_hidden_state = outputs.last_hidden_state
            hidden_states = outputs.decoder_hidden_states
            attentions = outputs.decoder_attentions
        else:
            outputs: BaseModelOutput = self.model(
                input_ids,
                attention_mask=attention_mask,
                position_ids=position_ids,
                inputs_embeds=inputs_embeds,
                **kwargs,
            )
            last_hidden_state = outputs.last_hidden_state
            hidden_states = outputs.hidden_states
            attentions = outputs.attentions

        logits = self.score(last_hidden_state)

        loss = None
        if labels is not None:
            loss = self.loss_function(logits, labels, self.config)

        return TokenClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=hidden_states,
            attentions=attentions,
        )


__all__ = [
    "T5GemmaConfig",
    "T5GemmaModuleConfig",
    "T5GemmaModel",
    "T5GemmaEncoderModel",
    "T5GemmaForConditionalGeneration",
    "T5GemmaForSequenceClassification",
    "T5GemmaForTokenClassification",
    "T5GemmaPreTrainedModel",
]
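

# Illustrative usage sketch (not part of the original module; the checkpoint is the documentation
# checkpoint defined at the top of this file, and the calls are standard `transformers` API):
#
#   from transformers import AutoTokenizer, T5GemmaForConditionalGeneration
#
#   tokenizer = AutoTokenizer.from_pretrained("google/t5gemma-2b-2b-prefixlm-it")
#   model = T5GemmaForConditionalGeneration.from_pretrained("google/t5gemma-2b-2b-prefixlm-it")
#   inputs = tokenizer("Summarize: The quick brown fox jumps over the lazy dog.", return_tensors="pt")
#   generated = model.generate(**inputs, max_new_tokens=32)
#   print(tokenizer.batch_decode(generated, skip_special_tokens=True))
#
# The classification heads can also be built encoder-only by passing `is_encoder_decoder=False` to
# their constructors, e.g. `T5GemmaForSequenceClassification(config, is_encoder_decoder=False)`, in
# which case only `T5GemmaEncoderModel` is instantiated internally.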