
    h                       d Z ddlZddlZddlZddlmZmZmZ ddlZddlm	Z	 ddl
mZ ddlmZ ddlmZmZmZ dd	lmZ dd
lmZ ddlmZ ddlmZmZmZmZ ddlmZ ddlm Z m!Z! ddl"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z) ddl*m+Z+ ddl,m-Z-  e&       rddl.m/Z/ ddl0m1Z1  e)jd                  e3      Z4dUdejj                  de6de6de6dejj                  f
dZ7dejj                  de6de6dejj                  fdZ8dUdejj                  de6de6de6dejj                  f
dZ9de6dejj                  fdZ:d ejj                  de6dejj                  fd!Z;d"ejj                  de6d#ejx                  dejj                  fd$Z=d"ejj                  d%e6de>ejj                  ejj                  f   fd&Z?d"ejj                  d%e6dejj                  fd'Z@d(ejj                  d)ejj                  d*e6dejj                  fd+ZA G d, d-e	j                        ZC	 dd.lDmEZE eEZCe4j                  d/        G d1 d2e	j                        ZJ G d3 d4e	j                        ZK G d5 d6e	j                        ZL G d7 d8e	j                        ZM G d9 d:e	j                        ZN G d; d<e	j                        ZO G d= d>e	j                        ZP G d? d@e	j                        ZQ G dA dBe	j                        ZR G dC dDe	j                        ZS G dE dFe      ZTe% G dG dHe             ZU G dI dJeU      ZVdKZWe% G dL dMeU             ZX e%dNO       G dP dQeUe             ZYe% G dR dSeU             ZZg dTZ[y# eG$ r Y ?eH$ r e4j                  d0       Y Ww xY w)VzPyTorch LongT5 model.    N)AnyOptionalUnion)nn)CrossEntropyLoss   )ACT2FN)CacheDynamicCacheEncoderDecoderCache)GenerationMixin)AttentionMaskConverter)GradientCheckpointingLayer)BaseModelOutput)BaseModelOutputWithPastAndCrossAttentionsSeq2SeqLMOutputSeq2SeqModelOutput)PreTrainedModel) find_pruneable_heads_and_indicesprune_linear_layer)DUMMY_INPUTS
DUMMY_MASKauto_docstringis_torch_flex_attn_availableis_torch_fx_proxyis_torchdynamo_compilinglogging)deprecate_kwarg   )LongT5Config)	BlockMask)make_flex_block_causal_maskx	block_lendim	pad_valuereturnc                 t   | j                   |    |z  }t        | j                         sCt        | j                         }||xx   |z  cc<   t        j                  || j
                        S dg| j                  z  }d|f||<   t        |ddd   d      }t        j                  j                  | |d|      } | S )	zHPad a tensor so that a sequence length will be a multiple of `block_len`dtyper   r   r   N constantpadmodevalue)shapealllisttorchzerosr*   ndimsumr   
functionalr0   )r#   r$   r%   r&   pad_len	new_shaper0   s          h/var/www/html/eduruby.in/venv/lib/python3.12/site-packages/transformers/models/longt5/modeling_longt5.py_pad_to_multipler>   @   s    wws|mi'Gqww<M	#'!{{9AGG44(QVV
C7|CH
c$B$i
C
!:YGAH    c                 >   | j                   |   |z  dk7  rt        | ||d      } | j                   |   |z  }| j                   d| ||fz   | j                   |dz   d z   }d|v r,t        j                  || j                  | j
                        S | j                  |      S )zSplit an input tensor into blocks of a given `block_len` along the given `dim`. If the dimension length
    is not a multiple of `block_len`, it will be padded first with selected `pad_value`.
    r   )r&   Nr   r*   device)r3   r>   r6   emptyr*   rB   reshape)r#   r$   r%   
num_blocksoutput_shapes        r=   _split_into_blocksrG   P   s    
 	wws|i1$Q	3!<*J774C=J	#::QWWcAg[=QQLL{{<qwwqxxHH99\""r?   	block_dimsequence_dimc                    | j                   |   }dg| j                  z  }d||<   t        |ddd   d      }t        j                  j                  | |d|      } g }t        d      D ]M  }t        d	d      g| j                  z  }t        |||z         ||<   t        |      }|j                  | |          O t        j                  ||
      S )zConcatenate three consecutive blocks for each input block for local attentiont.

    For more information, see: https://huggingface.co/papers/2112.07916.
    r+   )r   r   Nr,   r-   r.   r/   r   r   r%   )r3   r8   r9   r   r:   r0   rangeslicetupleappendr6   cat)	r#   rH   rI   r&   rE   r0   blocks_listiindicess	            r=   _concatenate_3_blocksrT   _   s    
 #J(QVV
CC	N
c$B$i
C
!:YGA&(K1X ' D>"QVV+"1a*n5	.1W:&' 99[l33r?   c                     t        j                  d| z  t         j                        }|| |   }|j                  d      |j                  d      z
  }|S )z:Makes 3-blocked relative position ids for local attention.r   r)   r   r   )r6   arangeint32	unsqueeze)r$   position_idscenter_position_idsrelative_position_idss       r=   "_make_3block_relative_position_idsr\   x   sR    <<IU[[AL&y)<(22158K8U8UVW8XX  r?   local_attention_maskc                     t        |      }t        j                  |      |k  }|ddddddf   }|j                  | j                        }t        j
                  | |      S )znMask local attention mask to enforce that tokens are not allowed to attend tokens farther than ``local_radius.N)r\   r6   abstorB   logical_and)r]   r$   r[   locality_masks       r=   _mask_local_attention_maskrc      s_    >yIII34y@M!$a"23M!$$%9%@%@AM1=AAr?   attention_maskrB   c                    t        | |d      }t        |dd      }|j                  d      }|j                  d      }t        j                  ||      }t        ||      }|j                  d      j                  |      S )z;Prepare attention mask to be applied for a local attention.r   rK      rH   rI   r,   )rG   rT   rX   r6   ra   rc   r`   )rd   r$   rB   _blocked_attention_mask_3blocked_attention_maskr]   s         r=   _get_local_attention_maskrk      s     1PQR45LXYhij5??C7AA"E ,,-DF^_56JIV))!,//77r?   global_block_sizec                    | j                   dd \  }dt        j                  dt        j                  ffd}t        j                  | | j                        z  }t        j
                  |d      |z
  }t        j                  | d	k7  d
d      j                  | j                        }t        j                  ||z   d
z
        j                  | j                        }t        j                  d|j                  |j                        }t        j                  ||kD  ||      }|| z  | dz
  z   } ||      }z  }|dkD  rBt        j                  |d      j                  j                  |d      j                  dd      }	n-t        j                  |d|j                  |j                        }	t        j
                  t        j                   ||      d      dz
  }
|
j#                  | j                        }
t        j                  |
|	k  dd      }
|j                  t        j$                        |
j                  t        j$                        fS )a  Obtain the "fixed block" global id corresponding to each input token.

    This implementation is a simplified version of the original Flaxformr implementation adopted from:
    https://github.com/google/flaxformer/blob/main/flaxformer/architectures/longt5/long_attention.py.

    In our scenario, as we use this strategy only for a decoder, orphan tokens, i.e. those tokens which do not make for
    the whole fixed block, are assigned to the preceding block.

    Padding tokens from the original sequence are represented by -1.
    Nrf   	block_idsr'   c                 X   t        j                        z  dz
  k(  }|j                  | j                        }t        j                  || dk\        }|j                  d      j                  d      j                  | j                        dz
  }t        j                  | |k  | |      } | S )Nr   r   r,   )
r6   rV   r`   rB   ra   r9   rX   typer*   where)rn   
block_endstrue_block_endsfull_blocksrl   seq_lens       r=   handle_orphan_tokensz:_make_global_fixed_block_ids.<locals>.handle_orphan_tokens   s    ll7+.??DUXYDYY
]]9#3#34
++J	QG%))"-77;@@QTUUKK	K 7KP	r?   rB   r   )axis              ?g     @r,   rA   r   rK   )r3   r6   Tensor	ones_likerB   cumsumrq   rp   r*   floortensormaxvaluesrepeat	transposer7   onesr`   int)rd   rl   
batch_sizerv   fixed_block_maskmaskglobal_block_ids_global_block_ids_lower_boundnum_globals_sequence_block_ids_maxglobal_segment_idsru   s    `         @r=   _make_global_fixed_block_idsr      s    )..r2J   ~n>S>STWhh||$41=@PP;;~,c7;@@AUAUVD{{4*:#:S#@AFF~G[G[\$)LL;K;Q;QZjZqZq$r!{{88:JLi )>9nq>PQ+,<=..KQ"')),<""E"L"L"S"ST_ab"c"m"mnoqr"s"'++!1!7!7@P@W@W#
 ejj[&IrRUVV+..~/D/DE%7;R%RTUWXY  +-?-D-DUYY-OOOr?   c                     t        | |      \  }}|j                  d   }t        j                  ||j                        }||d   z
  }|j                  t        j                        S )zBCreate the relative position tensor for local -> global attention.r,   rw   .N)r   r3   r6   rV   rB   rp   int64)rd   rl   rn   r   global_seq_lenglobal_positionsside_relative_positions          r=    _make_side_relative_position_idsr      sa    $@Qb$c!I!'--b1N||N9;K;KL-	)0DD!&&u{{33r?   hidden_statesrn   r   c                 x   |j                  |dk\  t        j                  ||j                  |j                              }t
        j                  j                  |j                  t        j                        |dz         ddddddf   }t        j                  d| |j                  | j                              S )zFCompute individual block aggregates by summing over individual blocks.r   rA   r   Nr,   z...nd,...ng->...gd)rq   r6   r   r*   rB   r   r:   one_hotrp   r   einsum)r   rn   r   one_hot_block_idss       r=   _create_global_aggregatesr      s    
 Q^9??S\ScScdI --innU[[.I>\]K]^_`bcehfheh_hi<<,m=N=S=STaTgTg=hiir?   c                   &     e Zd Zd fd	Zd Z xZS )LongT5LayerNormc                     t         |           t        j                  t	        j
                  |            | _        || _        y)zg
        Construct a layernorm module in the LongT5 style. No bias and no subtraction of mean.
        N)super__init__r   	Parameterr6   r   weightvariance_epsilon)selfhidden_sizeeps	__class__s      r=   r   zLongT5LayerNorm.__init__   s1     	ll5::k#:; #r?   c                    |j                  t        j                        j                  d      j	                  dd      }|t        j
                  || j                  z         z  }| j                  j                  t        j                  t        j                  fv r%|j                  | j                  j                        }| j                  |z  S )Nrf   r,   T)keepdim)r`   r6   float32powmeanrsqrtr   r   r*   float16bfloat16)r   r   variances      r=   forwardzLongT5LayerNorm.forward   s     !##EMM266q9>>r4>P%Ht?T?T4T(UU ;; ??),,T[[->->?M{{]**r?   )gư>)__name__
__module____qualname__r   r   __classcell__r   s   @r=   r   r      s    $+r?   r   )FusedRMSNormzSDiscovered apex.normalization.FusedRMSNorm - will use it instead of LongT5LayerNormzFdiscovered apex but it failed to load, falling back to LongT5LayerNormc                   *     e Zd Zdef fdZd Z xZS )LongT5DenseActDenseconfigc                 ^   t         |           t        j                  |j                  |j
                  d      | _        t        j                  |j
                  |j                  d      | _        t        j                  |j                        | _
        t        |j                     | _        y NFbias)r   r   r   Lineard_modeld_ffwiwoDropoutdropout_ratedropoutr	   dense_act_fnactr   r   r   s     r=   r   zLongT5DenseActDense.__init__
  sn    ))FNNFKKeD))FKKeDzz&"5"56&--.r?   c                    | j                  |      }| j                  |      }| j                  |      }t        | j                  j
                  t        j                        r|j                  | j                  j
                  j                  k7  r`| j                  j
                  j                  t        j                  k7  r/|j                  | j                  j
                  j                        }| j	                  |      }|S N)r   r   r   
isinstancer   r   r6   r{   r*   int8r`   )r   r   s     r=   r   zLongT5DenseActDense.forward  s    ./]3tww~~u||4##tww~~';';;$$

2),,TWW^^-A-ABM.r?   r   r   r   r    r   r   r   r   s   @r=   r   r   	  s    /| /r?   r   c                   *     e Zd Zdef fdZd Z xZS )LongT5DenseGatedActDenser   c                    t         |           t        j                  |j                  |j
                  d      | _        t        j                  |j                  |j
                  d      | _        t        j                  |j
                  |j                  d      | _        t        j                  |j                        | _        t        |j                     | _        y r   )r   r   r   r   r   r   wi_0wi_1r   r   r   r   r	   r   r   r   s     r=   r   z!LongT5DenseGatedActDense.__init__   s    IIfnnfkkF	IIfnnfkkF	))FKKeDzz&"5"56&--.r?   c                     | j                  | j                  |            }| j                  |      }||z  }| j                  |      }| j	                  |      }|S r   )r   r   r   r   r   )r   r   hidden_geluhidden_linears       r=   r   z LongT5DenseGatedActDense.forward(  sS    hhtyy78		-0#m3]3.r?   r   r   s   @r=   r   r     s    /| /r?   r   c                   *     e Zd Zdef fdZd Z xZS )LongT5LayerFFr   c                    t         |           |j                  rt        |      | _        nt        |      | _        t        |j                  |j                        | _	        t        j                  |j                        | _        y )Nr   )r   r   is_gated_actr   DenseReluDenser   r   r   layer_norm_epsilon
layer_normr   r   r   r   r   s     r=   r   zLongT5LayerFF.__init__3  s_    ":6"BD"5f"=D)&..f>W>WXzz&"5"56r?   c                 r    | j                  |      }| j                  |      }|| j                  |      z   }|S r   )r   r   r   )r   r   forwarded_statess      r=   r   zLongT5LayerFF.forward=  s=    ??=9../?@%5E(FFr?   r   r   s   @r=   r   r   2  s    7| 7r?   r   c                        e Zd Z	 	 ddedee   f fdZd Zedd       Z	ddZ
 eddd	
      	 	 	 	 	 	 	 	 	 dd       Z xZS )LongT5Attentionr   	layer_idxc                    t         |           |j                  | _        || _        |j                  | _        |j
                  | _        |j                  | _        |j                  | _        |j                  | _
        |j                  | _        | j                  | j                  z  | _        || _        |9| j                  r-t        j!                  d| j"                  j$                   d       t'        j(                  | j                  | j                  d      | _        t'        j(                  | j                  | j                  d      | _        t'        j(                  | j                  | j                  d      | _        t'        j(                  | j                  | j                  d      | _        | j                  r/t'        j2                  | j                  | j                        | _        t7               | _        d| _        y )NzInstantiating a decoder z without passing `layer_idx` is not recommended and will to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` when creating this class.Fr   )r   r   
is_decoderhas_relative_attention_biasrelative_attention_num_bucketsrelative_attention_max_distancer   d_kvkey_value_proj_dim	num_headsn_headsr   r   	inner_dimr   loggerwarning_oncer   r   r   r   qkvo	Embeddingrelative_attention_biassetpruned_headsgradient_checkpointingr   r   r   r   r   s       r=   r   zLongT5Attention.__init__F  ss    	 +++F(.4.S.S+/5/U/U,~~"(++''**(?(??"*4>>+B+B*C D, , 4<<eD4<<eD4<<eD4>>4<<eD+++-<<8[8[]a]i]i+jD(E&+#r?   c                    t        |      dk(  ry t        || j                  | j                  | j                        \  }}t        | j                  |      | _        t        | j                  |      | _        t        | j                  |      | _        t        | j                  |d      | _	        | j                  t        |      z
  | _        | j                  | j                  z  | _
        | j                  j                  |      | _        y Nr   r   rK   lenr   r   r   r   r   r   r   r   r   r   unionr   headsindexs      r=   prune_headszLongT5Attention.prune_headsi      u:?74<<!8!8$:K:K
u $DFFE2#DFFE2#DFFE2#DFFEq9||c%j0004<<? --33E:r?   c                 T   d}|rC|dz  }|| dkD  j                  t        j                        |z  z  }t        j                  |       } n*t        j                  | t        j
                  |              } |dz  }| |k  }|t        j                  | j                         |z        t        j                  ||z        z  ||z
  z  j                  t        j                        z   }t        j                  |t        j                  ||dz
              }|t        j                  || |      z  }|S a  
        Adapted from Mesh Tensorflow:
        https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593

        Translate relative position to a bucket number for relative attention. The relative position is defined as
        memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to
        position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for
        small absolute relative_position and larger buckets for larger absolute relative_positions. All relative
        positions >=max_distance map to the same bucket. All relative positions <=-max_distance map to the same bucket.
        This should allow for more graceful generalization to longer sequences than the model has been trained on

        Args:
            relative_position: an int32 Tensor
            bidirectional: a boolean - whether the attention is bidirectional
            num_buckets: an integer
            max_distance: an integer

        Returns:
            a Tensor with the same shape as relative_position, containing int32 values in the range [0, num_buckets)
        r   rf   r   r`   r6   longr_   min
zeros_likelogfloatmath	full_likerq   relative_positionbidirectionalnum_bucketsmax_distancerelative_buckets	max_exactis_smallrelative_position_if_larges           r=   _relative_position_bucketz)LongT5Attention._relative_position_buckety  s(   , AK!2Q!6 : :5:: F TT %		*; <!&+<e>N>NO`>a!b b  1$	$y0 &/II'--/);<hh|i/01Y&( "UZZ.	&"
 &+YY&8RT_bcTc(d&
" 	EKK2CE_``r?   c                    | | j                   j                  j                  }|.t        j                  |t        j
                  |      dddf   }n|dddf   j                  |      }t        j                  |t        j
                  |      dddf   }||z
  }| j                  || j                   | j                  | j                        }| j                  |      }	|	j                  g d      j                  d      }	|	S )%Compute binned relative position biasNrA   r  r  r  rf   r   r   r   )r   r   rB   r6   rV   r  r`   r  r   r   r   permuterX   )
r   query_length
key_lengthrB   cache_positioncontext_positionmemory_positionr  relative_position_bucketr   s
             r=   compute_biaszLongT5Attention.compute_bias  s    >1188??F!$||L

SYZ[\^b[bc-ag699&A,,zFSTXZ[T[\+.>>#'#A#A#.;;==	 $B $
  --.FG	*44Q7r?   past_key_valuepast_key_values4.58new_nameversionc                 b   |j                   dd \  }}|du}| j                  |      }|j                  |d| j                  | j                        j                  dd      }|Qt        |t              rA|j                  j                  | j                        }|r|j                  }n|j                  }n|}|r|n|}|rK|IrG|j                  | j                     j                  }|j                  | j                     j                  }n| j!                  |      }| j#                  |      }|j                  |d| j                  | j                        j                  dd      }|j                  |d| j                  | j                        j                  dd      }|D|s|
nd}
|j%                  ||| j                  d|
i      \  }}|rd|j                  | j                  <   t'        j(                  ||j                  dd            }||j                   d   }||n|
d   dz   }| j*                  sZt'        j,                  d| j                  ||f|j.                  |j0                  	      }| j2                  rE| j4                  r9d|_        n1| j9                  |||j.                  |

      }|dddd| dddf   }|#|ddddddd|j                   d   f   }||z   }| j:                  rRt'        j<                  |j                   d         }d|t?        | j:                        <   |dd|jA                         f   }n|}||z  }tB        jD                  jG                  |jI                         d      jK                  |      }tB        jD                  jM                  || jL                  | j4                        }|||z  }t'        j(                  ||      }|j                  dd      jO                         }|j                  |d| jP                        }| jS                  |      }||f}|	r||fz   }|S )z
        Self-attention (if key_value_states is None) or attention over source sentence (provided by key_value_states).
        Nrf   r,   r   r  Tr   rh   rB   r*   )rB   r  r   rK   ptraining)*r3   r   viewr   r   r   r   r   
is_updatedgetr   cross_attention_cacheself_attention_cachelayerskeysr   r   r   updater6   matmulr   r7   rB   r*   r   r.  requires_gradr#  r   r   r5   boolr   r:   softmaxr  type_asr   
contiguousr   r   )r   r   r   key_value_statesposition_biasr%  layer_head_maskr  	use_cacheoutput_attentionsr  r   
seq_lengthis_cross_attentionquery_statesr0  curr_past_key_valuecurrent_states
key_statesvalue_statesscoresr  real_seq_lengthcausal_maskposition_bias_maskedattn_weightsattn_outputoutputss                               r=   r   zLongT5Attention.forward  s   & "/!4!4Ra!8
J .T9vvm,#((RtG^G^_iijkmno &:oGZ+[(3377GJ!&5&K&K#&5&J&J#"1-?)]/"=*,33DNNCHHJ.55dnnELLL/J66.1L#RtG^G^_iijkmnoJ',,ZT\\4KbKbcmmnoqrsL*7It+>+E+Ednn?OQ_>`,(
L &AEO..t~~> lJ,@,@A,FG #))"-J.:.FlN[]L^abLbO33 %j*=fmm[a[g[g! ..4==26M/ $ 1 1#ZVd !2 ! !.aZKL!.C D"1a,Bj.>.>r.B,B#BC - ;::m11!45D,-Dd''()#0DIIK#@ #0 && }},,V\\^,DLLVT}},,\T\\TXTaTa,b &'/9Lll<>!++Aq1<<>!&&z2t~~Fff[)./Gr?   FNT       )NN)	NNNNNNFFN)r   r   r   r    r   r   r   r  staticmethodr  r#  r   r   r   r   s   @r=   r   r   E  s     %*#'	!,!, C=	!,F;  -  - ^( %0A6R l Slr?   r   c                   b     e Zd Zddededdf fdZd Zedd       Zde	fd	Z
	 	 	 	 dd
Z xZS )LongT5LocalAttentionr   r   r'   Nc                    t         |           |j                  | _        || _        |j                  | _        |j
                  | _        |j                  | _        |j                  | _        |j                  | _
        |j                  | _        | j                  dz   | _        |j                  | _        | j                  | j                  z  | _        t!        j"                  | j                  | j                  d      | _        t!        j"                  | j                  | j                  d      | _        t!        j"                  | j                  | j                  d      | _        t!        j"                  | j                  | j                  d      | _        | j                  r/t!        j,                  | j                  | j                        | _        t1               | _        d| _        y )Nr   Fr   )r   r   r   r   r   r   r   r   r   r   r   local_radiusr$   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   s      r=   r   zLongT5LocalAttention.__init__.  sS    +++F(.4.S.S+/5/U/U,~~"(++''"//**Q.**(?(?? 4<<eD4<<eD4<<eD4>>4<<eD+++-<<8[8[]a]i]i+jD(E&+#r?   c                    t        |      dk(  ry t        || j                  | j                  | j                        \  }}t        | j                  |      | _        t        | j                  |      | _        t        | j                  |      | _        t        | j                  |d      | _	        | j                  t        |      z
  | _        | j                  | j                  z  | _
        | j                  j                  |      | _        y r   r   r   s      r=   r  z LongT5LocalAttention.prune_headsH  r  r?   c                 T   d}|rC|dz  }|| dkD  j                  t        j                        |z  z  }t        j                  |       } n*t        j                  | t        j
                  |              } |dz  }| |k  }|t        j                  | j                         |z        t        j                  ||z        z  ||z
  z  j                  t        j                        z   }t        j                  |t        j                  ||dz
              }|t        j                  || |      z  }|S r  r  r  s           r=   r  z.LongT5LocalAttention._relative_position_bucketX  (   . AK!2Q!6 : :5:: F TT %		*; <!&+<e>N>NO`>a!b b  1$	$y0 &/II'--/);<hh|i/01Y&( "UZZ.	&"
 &+YY&8RT_bcTc(d&
" 	EKK2CE_``r?   block_lengthc                    | j                   j                  j                  j                  dk7  r | j                   j                  j                  nd}t	        j
                  d|z  t        j                  |      }|||  }|dddf   |dddf   z
  }| j                  || j                   | j                  | j                        }| j                  |      }|j                  g d      j                  d      j                  d      }|S r  metaNr   rA   r  r  r   r   r   rB   rp   r6   rV   r  r  r   r   r   r  rX   r   r]  target_devicer!  r   r  r"  r   s           r=   r#  z!LongT5LocalAttention.compute_bias      ++2299>>&H ((//66 	
  ,,q<'7uzzR_`*<F ,D!G47G47PP#'#A#A#.;;==	 $B $
  --.FG	*44Q7AA!Dr?   c                 $    |j                   d d \  } fd} fd} | j                  |            }	 | j                  |            }
 | j                  |            }t	        |	 j
                  d      }	t	        |
 j
                  d      }
t	        | j
                  d      }t        |
dd      }
t        |dd      }t        j                  d|	|
      }|ʉ j                  srt        j                  dd j                   j
                  d j
                  z  f|j                  |j                  	      } j                  r/ j                  r#d
|_        n j#                   j
                        }|/t        j$                  |dkD  dd      }||j'                  dd      z   }||z  }t(        j*                  j-                  |j/                         d      j1                  |      }t(        j*                  j3                  | j2                   j                        }|||z  }|j5                  |j                        } |t        j                  d||            }|d d d |d d f   } j7                  |      }||f}|r||fz   }|S )Nrf   c                 T    | j                  dj                  j                        S 
projectionr,   r/  r   r   statesr   r   s    r=   r3   z+LongT5LocalAttention.forward.<locals>.shape  "    ;;z2t||T=T=TUUr?   c                 Z    | j                         j                  dj                        S rD   r,   r<  r/  r   rj  s    r=   unshapez-LongT5LocalAttention.forward.<locals>.unshape  %    $$&++JDNNKKr?   r   rK   rg   ...qhd,...khd->...hqkr   r+  Tr   ry       _r,   r,  ...hqk,...khd->...qhd)r3   r   r   r   rG   r$   rT   r6   r   r   r7   r   rB   r*   r   r.  r8  r#  rq   r   r   r:   r:  r  r;  r   rp   r   )r   r   r   r>  r?  rA  rB  r3   rp  rD  rG  rH  rI  rM  rN  rO  r   s   `               @r=   r   zLongT5LocalAttention.forward  sc    "/!4!4Ra!8
J	V	L
 TVVM23466-01
TVVM23 *,AN'
DNNJ
),AN +:QRS
,\QUVW #\:
  33 %4<<T^^9KLU[UbUbjpjvjv! ..4==26M/ $ 1 1$.. A{{4!8S%8 -q!0D D-}},,V\\^,DLLVT}},,\T\\TXTaTa,b &'/9L#((););<ell+BLR^_`!![j[!"34ff[) 

 /Gr?   FrQ  NNNF)r   r   r   r    r9  r   r  rT  r  r   r#  r   r   r   s   @r=   rV  rV  -  sX    ,| ,$ ,[_ ,4;  -  - ^ 6 Kr?   rV  c                        e Zd Zddededdf fdZd Zedd       Zde	fd	Z
d
ej                  dej                  dej                  fdZ	 	 	 	 ddZ xZS )LongT5TransientGlobalAttentionr   r   r'   Nc                    t         |           |j                  | _        || _        |j                  | _        |j
                  | _        |j                  | _        |j                  | _        |j                  | _
        |j                  | _        | j                  dz   | _        |j                  | _        |j                  | _        | j                  | j                  z  | _        t#        j$                  | j                  | j                   d      | _        t#        j$                  | j                  | j                   d      | _        t#        j$                  | j                  | j                   d      | _        t#        j$                  | j                   | j                  d      | _        | j                  r/t#        j.                  | j                  | j                        | _        t3               | _        | j                  r/t#        j.                  | j                  | j                        | _        t9        |j                  |j:                        | _        y )Nr   Fr   r   )r   r   r   r   r   r   r   r   r   r   r   rX  r$   rl   r   r   r   r   r   r   r   r   r   r   r   r   r   global_relative_attention_biasr   r   global_input_layer_normrY  s      r=   r   z'LongT5TransientGlobalAttention.__init__  s    +++F(.4.S.S+/5/U/U,~~"(++''"//**Q.!'!9!9**(?(?? 4<<eD4<<eD4<<eD4>>4<<eD+++-<<8[8[]a]i]i+jD(E ++24,,t?b?bdhdpdp2qD/'6v~~6KdKd'e$r?   c                    t        |      dk(  ry t        || j                  | j                  | j                        \  }}t        | j                  |      | _        t        | j                  |      | _        t        | j                  |      | _        t        | j                  |d      | _	        | j                  t        |      z
  | _        | j                  | j                  z  | _
        | j                  j                  |      | _        y r   r   r   s      r=   r  z*LongT5TransientGlobalAttention.prune_heads  r  r?   c                 T   d}|rC|dz  }|| dkD  j                  t        j                        |z  z  }t        j                  |       } n*t        j                  | t        j
                  |              } |dz  }| |k  }|t        j                  | j                         |z        t        j                  ||z        z  ||z
  z  j                  t        j                        z   }t        j                  |t        j                  ||dz
              }|t        j                  || |      z  }|S r  r  r  s           r=   r  z8LongT5TransientGlobalAttention._relative_position_bucket  r\  r?   r]  c                    | j                   j                  j                  j                  dk7  r | j                   j                  j                  nd}t	        j
                  d|z  t        j                  |      }|||  }|dddf   |dddf   z
  }| j                  || j                   | j                  | j                        }| j                  |      }|j                  g d      j                  d      j                  d      }|S r_  ra  rb  s           r=   r#  z+LongT5TransientGlobalAttention.compute_biasP  rd  r?   r   r   c                 v   t        j                  |d   |d d d d d f         d d d df   }t        j                  |dkD  dd      }t        || j                        }| j                  || j                   | j                  | j                        }| j                  |      }|j                  g d      }||z   }|S )Nr   .r   ry   rs  r  )r   r   r   rf   )r6   eqrq   r   rl   r  r   r   r   rz  r  )r   r   r   side_attention_maskattention_side_biasr   side_relative_position_bucket	side_biass           r=   compute_side_biasz0LongT5TransientGlobalAttention.compute_side_biash  s    #hhtI8J1dTU:8VWXY[_adXde#kk*=*A3N!A$H^H^!_(,(F(F"#.;;==	 )G )
% 778UV	 %%l3	1I=""r?   c                 D	    |j                   d d \  } fd} fd}t        ||n!t        j                  |j                   d d        j                        \  }	}
|
j                   d   }t        ||	|      } j                  |      } | j                  |            } | j                  |            } | j                  |            } | j                  |            } | j                  |            }t        | j                  d      }t        | j                  d      }t        | j                  d      }t        |dd      }t        |dd      }dg|j                  dz   z  }|j                   d   |d<   |j                  d      j                  |      }|j                  d      j                  |      }t        j                   ||gd      }t        j                   ||gd      }t        j"                  d||      }|<t%        | j                  |j&                        }t        j(                  |d	kD  d
d      }nd }|j j*                  srt        j,                  dd j.                   j                  d j                  z  f|j&                  |j0                        } j2                  r/ j4                  r#d|_        n j9                   j                        }|||j;                  dd      z   }|j=                  |j0                        }|t        j                  |      } j?                  ||
      }t        | j                  d      j;                  dd      }|j=                  |j0                        jA                  |j&                        }t        j                   ||gd      }||z  }tB        jD                  jG                  |jI                         d      jK                  |      }tB        jD                  jM                  | jL                   j4                        }|||z  }|j=                  |j0                        } |t        j"                  d||            }|d d d |d d f   } jO                  |      }||f}|r||fz   }|S )Nrf   c                 T    | j                  dj                  j                        S rg  ri  rj  s    r=   r3   z5LongT5TransientGlobalAttention.forward.<locals>.shape  rl  r?   c                 Z    | j                         j                  dj                        S rn  ro  rj  s    r=   rp  z7LongT5TransientGlobalAttention.forward.<locals>.unshape  rq  r?   r,   r   rK   rg   rr  r   ry   rs  r   r+  Trh   r,  rt  )(r3   r   r6   r   rl   r   r{  r   r   r   rG   r$   rT   r8   rX   r   rP   r   rk   rB   rq   r   r7   r   r*   r   r.  r8  r#  r   rp   r  r`   r   r:   r:  r  r;  r   r   )r   r   r   r>  r?  rA  rB  r3   rp  rn   r   _global_seq_lenglobal_inputsrD  rG  rH  side_key_statesside_value_statesrepsrI  r]   side_position_biasrM  rN  rO  r   s   `                        @r=   r   z&LongT5TransientGlobalAttention.forward}  sB    "/!4!4Ra!8
J	V	L )E$D%**]5H5H"5M*N"")
%	%
 -22261-O\44]C TVVM23466-01
TVVM23} 56!$&&"78 *,AN'
DNNJ
),AN +:QRS
,\QUVW so**Q./""1%Q)33A6==dC-77:AA$G YY
O<!D
yy,0A!BJ 5|ZP#<T4>>S`SgSg#h #(;;/Ca/Ge#T #'  33 %4<<T^^9KL!== ,,!
 ..4==26M/ $ 1 1$.. A#/ -0D0N0NqRS0T T)..v||<M |zz*j9!%!7!7>P!Q!34F\^!_!i!ijkmn!o!3!8!8!F!I!I&--!X!II}6H&IrRM-}},,V\\^,DLLVT}},,\T\\TXTaTa,b &'/9L#((););<ell+BLR^_`!![j[!"34ff[)./Gr?   ru  rQ  rv  )r   r   r   r    r9  r   r  rT  r  r   r#  r6   r{   r  r   r   r   s   @r=   rx  rx    s    f| f$ f[_ f>;  -  - ^ 0#ell # #Y^YeYe #0 ur?   rx  c                   ^     e Zd Zddee   f fdZ eddd      	 	 	 	 	 	 	 d	d       Z xZS )
LongT5LayerSelfAttentionr   c                     t         |           t        |||      | _        t	        |j
                  |j                        | _        t        j                  |j                        | _        y )Nr   r   r   )r   r   r   SelfAttentionr   r   r   r   r   r   r   r   r   s       r=   r   z!LongT5LayerSelfAttention.__init__  sT    ,0KW`
 *&..f>W>WXzz&"5"56r?   r$  r%  r&  r'  c	           
          | j                  |      }	| j                  |	|||||||      }
|| j                  |
d         z   }|f|
dd  z   }|S )N)r   r>  r?  r%  r@  rA  r  r   r   )r   r  r   )r   r   rd   r>  r?  r%  r@  rA  r  normed_hidden_statesattention_outputrO  s               r=   r   z LongT5LayerSelfAttention.forward  sv      $}=-- '++/) . 	
 &5Ea5H(II "%5ab%99r?   rP  )NNNNFFN	r   r   r   r   r   r   r   r   r   r   s   @r=   r  r    sK    7XVY] 7 %0A6R  Sr?   r  c                   F     e Zd ZdZddee   f fdZ	 	 	 	 ddefdZ xZ	S )LongT5LayerLocalSelfAttentionz$Local self attention used in encoderr   c                     t         |           t        ||      | _        t	        |j
                  |j                        | _        t        j                  |j                        | _        y N)r   r   )r   r   rV  LocalSelfAttentionr   r   r   r   r   r   r   r   r   s       r=   r   z&LongT5LayerLocalSelfAttention.__init__  sL    "6v[v"w)&..f>W>WXzz&"5"56r?   kwargsc                     | j                  |      }| j                  |||||      }|| j                  |d         z   }|f|dd  z   }	|	S N)r   r>  r?  rA  r   r   )r   r  r   
r   r   rd   r>  r?  rA  r  r  r  rO  s
             r=   r   z%LongT5LayerLocalSelfAttention.forward$  sm      $}=22 '+/ 3 
 &5Ea5H(II "%5ab%99r?   rP  rv  
r   r   r   __doc__r   r   r   r   r   r   r   s   @r=   r  r    s4    .7XVY] 7  r?   r  c                   F     e Zd ZdZddee   f fdZ	 	 	 	 ddefdZ xZ	S )'LongT5LayerTransientGlobalSelfAttentionz/Transient-Global self attention used in encoderr   c                     t         |           t        ||      | _        t	        |j
                  |j                        | _        t        j                  |j                        | _        y r  )r   r   rx  TransientGlobalSelfAttentionr   r   r   r   r   r   r   r   r   s       r=   r   z0LongT5LayerTransientGlobalSelfAttention.__init__=  sQ    ,J0K-
) *&..f>W>WXzz&"5"56r?   r  c                     | j                  |      }| j                  |||||      }|| j                  |d         z   }|f|dd  z   }	|	S r  )r   r  r   r  s
             r=   r   z/LongT5LayerTransientGlobalSelfAttention.forwardE  sm      $}=<< '+/ = 
 &5Ea5H(II "%5ab%99r?   rP  rv  r  r   s   @r=   r  r  :  s4    97XVY] 7  r?   r  c                   `     e Zd Zddee   f fdZ eddd      	 	 	 	 	 	 	 	 d	d       Z xZS )
LongT5LayerCrossAttentionr   c                     t         |           t        |d|      | _        t	        |j
                  |j                        | _        t        j                  |j                        | _        y )NFr  r   )r   r   r   EncDecAttentionr   r   r   r   r   r   r   r   )r   r   r   r   s      r=   r   z"LongT5LayerCrossAttention.__init__]  sO    .vSXdmn)&..f>W>WXzz&"5"56r?   r$  r%  r&  r'  c                     | j                  |      }| j                  |||||||||	|

      }|| j                  |d         z   }|f|dd  z   }|S )N)	r   r=  r>  r?  r%  r@  r  rA  r  r   r   )r   r  r   )r   r   r=  rd   r>  r?  r%  r@  r  rA  r  r  r  layer_outputrO  s                  r=   r   z!LongT5LayerCrossAttention.forwardc  s{      $}=// -'++%/) 0 
 %t||4DQ4G'HH/$4QR$88r?   r   )NNNNFNFNr  r   s   @r=   r  r  \  sM    7(3- 7 %0A6R
  Sr?   r  c                   h     e Zd Zddee   f fdZ eddd      	 	 	 	 	 	 	 	 	 	 	 	 d	d       Z xZS )
LongT5Blockr   c                    t         |           |j                  | _        |j                  rt        }nE|j                  dk(  rt
        }n/|j                  dk(  rt        }nt        d|j                   d      t        j                         | _
        | j                  j                   ||||             | j                  r&| j                  j                  t        ||             | j                  j                  t        |             y )Nlocalztransient-globalzjFor encoder attention mechanism, either `local` or `transient-global` attention type is expected, but got .r  )r   )r   r   r   r  encoder_attention_typer  r  
ValueErrorr   
ModuleListlayerrO   r  r   )r   r   r   r   attention_layerr   s        r=   r   zLongT5Block.__init__  s     ++6O**g5;O**.@@EO!889<  ]]_


F@[gpq	
 ??JJ7)TU

-/0r?   r$  r%  r&  r'  c                 0    | j                   d   |||||	|
||      }|d   }|dd  }|j                  t        j                  k(  rht        j                  |      j                         rEt        j                  |j                        j                  dz
  }t        j                  || |      }| j                  xr |d u}|r | j                   d   ||||||	|d   dz   |
||
      }|d   }|j                  t        j                  k(  rht        j                  |      j                         rEt        j                  |j                        j                  dz
  }t        j                  || |      }||dd  z   } | j                   d   |      }|j                  t        j                  k(  rht        j                  |      j                         rEt        j                  |j                        j                  dz
  }t        j                  || |      }|f|z   S )Nr   )rd   r>  r?  r%  r@  rA  r  r   i  )r  r   r,   )	r=  rd   r>  r?  r%  r  r@  rA  r  )
r  r*   r6   r   isinfanyfinfor   clampr   )r   r   rd   r>  encoder_hidden_statesencoder_attention_maskencoder_decoder_position_biasr?  cross_attn_layer_head_maskr%  r@  rA  return_dictr  self_attention_outputsattention_outputsclamp_valuedo_cross_attentioncross_attention_outputss                      r=   r   zLongT5Block.forward  s   " "/A)'++/)	"
 /q12126 %--/EKK4N4R4R4T++m&9&9:>>EK!KKK<[YM!__R1Fd1R&3djjm!65; : /+B/!3#"3-'# 4A6M ""emm3M8R8V8V8X#kk-*=*=>BBTI %M|Q\ ] !24KAB4O O '

2}5 %--/EKK4N4R4R4T++m&9&9:>>EK!KKK<[YM 00	
r?   rP  )NNNNNNNNFFTNr  r   s   @r=   r  r    s\    1XVY] 1. %0A6R "#&*#'D
 SD
r?   r  c                   F    e Zd ZU eed<   dZdZdgZdZe	d        Z
d Zd Zy	)
LongT5PreTrainedModelr   transformerTr  Fc                 v    t        j                  t              }t        j                  t              }|||d}|S )N)decoder_input_ids	input_idsdecoder_attention_mask)r6   r   r   r   )r   r  
input_maskdummy_inputss       r=   r  z"LongT5PreTrainedModel.dummy_inputs  s8     LL.	\\*-
!*"&0

 r?   c                 N   | j                   j                  }t        |t              r)|j                  j
                  j                  |dz         yt        |t        t        t        f      r|j                  j                  j
                  j                  d|dz         t        |d      rL| j                   j                  s5|j                  j                  j
                  j                  d|dz         yyyt        |t              rM|j                   j                  j
                  j                  d|| j                   j"                  dz  z         t        |j                   d      rD|j                   j$                  .|j                   j$                  j
                  j'                          |j(                  j                  j
                  j                  d|| j                   j*                  dz  z         t        |j(                  d      rF|j(                  j$                  /|j(                  j$                  j
                  j'                          yyyt        |t,              r|j.                  j                  j
                  j                  d|| j                   j"                  dz  z         t        |j.                  d      rD|j.                  j$                  .|j.                  j$                  j
                  j'                          |j0                  j                  j
                  j                  d|| j                   j"                  dz  z         t        |j0                  d      rD|j0                  j$                  .|j0                  j$                  j
                  j'                          |j(                  j                  j
                  j                  d|| j                   j*                  dz  z         t        |j(                  d      rF|j(                  j$                  /|j(                  j$                  j
                  j'                          yyyt        |t2        t4        t6        f      r| j                   j"                  }| j                   j8                  }| j                   j:                  }|j<                  j                  j
                  j                  d|||z  dz  z         |j>                  j                  j
                  j                  d||dz  z         |j@                  j                  j
                  j                  d||dz  z         |jB                  j                  j
                  j                  d|||z  dz  z         |jD                  r|jF                  j                  j
                  j                  d||dz  z         t        |t6              r8|jH                  j                  j
                  j                  d||dz  z         yyyy)zInitialize the weightsrz   ry   )r   stdlm_head      r   N)%r   initializer_factorr   r   r   datafill_LongT5ModelLongT5ForConditionalGenerationLongT5EncoderModelsharednormal_hasattrtie_word_embeddingsr  r   r   r   r   zero_r   r   r   r   r   r   rV  rx  r   r   r   r   r   r   r   r   rz  )r   modulefactorr   r   r   s         r=   _init_weightsz#LongT5PreTrainedModel._init_weights  s   //fo.MM$$Vc\2.LN` ab MM  %%--3FSL-Ivy)$++2Q2Q%%**22#2N 3R) 34 II!!))s4;;CVCV[_B_8`)avyy&)fiinn.H		##))+II!!))s4;;CSCSX\B\8])^vyy&)fiinn.H		##))+ /I) 89KK##++&T[[EXEX]aDa:b+cv{{F+0@0@0L  %%++-KK##++&T[[EXEX]aDa:b+cv{{F+0@0@0L  %%++-II!!))s4;;CSCSX\B\8])^vyy&)fiinn.H		##))+ /I)2FHf gh kk))G!%!1!1kk++GHHOO  ((cv'L^B^cgAg7h(iHHOO  ((cv$7O(PHHOO  ((cv$7O(PHHOO  ((cv'L^B^cgAg7h(i11..55::BBQW\chl[lQmBnf&DE99@@EEMM fT0A&B N  F 2 ir?   c                    | j                   j                  }| j                   j                  }|t        d      t	        |      rGt        j                  |j                  d d dz   |      }t        j                  ||dd df   gd      }n>|j                  |j                        }|dd df   j                         |ddd f<   ||d<   |t        d      |j                  |d	k(  |       |S )
Nzself.model.config.decoder_start_token_id has to be defined. In LongT5 it is usually set to the pad_token_id. See LongT5 docs for more information.r,   )r   .rK   r   ).r   z1self.model.config.pad_token_id has to be defined.)r   decoder_start_token_idpad_token_idr  r   r6   fullr3   rP   	new_zerosclonemasked_fill_)r   r  r  r  shifted_input_idss        r=   _shift_rightz"LongT5PreTrainedModel._shift_right)  s    !%!C!C{{//!)8  Y' %

9??3B+?$+FH^ _ %		+<iSbS>Q*RXZ [ ) 3 3IOO D)238)<)B)B)Dc12g&(>f%PQQ&&'8D'@,O  r?   N)r   r   r   r    __annotations__base_model_prefixsupports_gradient_checkpointing_no_split_modules_can_compile_fullgraphpropertyr  r  r  r-   r?   r=   r  r    s?    %&*#&" .b!r?   r  c                       e Zd Zd fd	Zd Z	 	 	 	 	 	 	 	 	 	 	 	 	 ddZ	 ddeej                  df   dej                  dej                  de	d	e
f
d
Zedej                  dededej                  dej                  defd       Z xZS )LongT5Stackc                    t         |   |       t        j                  |j                  |j
                        | _        ||j                  | j                  _        |j                  | _        |j                  | _	        | j                  dz   | _
        t        j                  t        |j                        D cg c]  }t        |t        |dk(        |       c}      | _        t#        |j
                  |j$                        | _        t        j(                  |j*                        | _        d| _        | j1                          y c c}w )Nr   r   r  r   F)r   r   r   r   
vocab_sizer   embed_tokensr   r   rX  r$   r  rL   
num_layersr  r9  blockr   r   final_layer_normr   r   r   r   	post_init)r   r   r  rR   r   s       r=   r   zLongT5Stack.__init__F  s    LL):):FNNK#'3':':D$ ++"//**Q.]] v001 FQ!VXYZ

 !0FD]D] ^zz&"5"56&+# 	s   9!Ec                     || _         y r   )r  r   new_embeddingss     r=   set_input_embeddingsz LongT5Stack.set_input_embeddings`  s
    *r?   c                    |	|	n| j                   j                  }	|
|
n| j                   j                  }
||n| j                   j                  }||n| j                   j                  }|$|"| j
                  rdnd}t        d| d| d      |&|j                         }|j                  d|d         }n8||j                         d d }n"| j
                  rdnd}t        d| d| d	      | j                  r%| j                  r|	rt        j                  d
       d}	|$| j                  J d       | j                  |      }|\  }}| j
                  rf|	rr|p| j                   j                  r5t        t!        | j                         t!        | j                               }n%t!        | j                         }n| j
                  sd }||j#                         nd}|%t%        j&                  |||z   |j(                        }|1t+               s'||z   }t%        j,                  |||j(                        }| j
                  r2| j/                  |||t1        |t              r|j2                  n||
      }n=| j                   j4                  dk(  r"t7        || j8                  |j(                        }n|}| j
                  rO|M|j                         \  }}}||f}|!t%        j,                  ||j(                        }| j;                  |      }nd }| j=                  || j                   j>                        }| j=                  || j                   j>                        }|rdnd }|
rdnd }|
r| j
                  rdnd }d }d }| jA                  |      }tC        | jD                        D ]o  \  } }!||    }"||    }#|r||fz   } |!|||||||"|#||	|
||      }$|$d   }|$d   }| j
                  r|	|$|
rdnd   }|
sQ||$d   fz   }| j
                  sg||$d   fz   }q | jG                  |      }| jA                  |      }|r||fz   }|stI        d |||||fD              S tK        |||||      S )Ndecoder_ zYou cannot specify both zinput_ids and zinputs_embeds at the same timer,   zYou have to specify either zinput_ids or inputs_embedszZ`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...Fz<You have to initialize the model with valid token embeddings)r   r   rw   r  r-   )r?  r  r%  r@  rA  r  r  r   r   rf      c              3   $   K   | ]  }|| 
 y wr   r-   ).0r   s     r=   	<genexpr>z&LongT5Stack.forward.<locals>.<genexpr>  s      
 = 
s   )last_hidden_stater%  r   
attentionscross_attentions)&r   r@  rA  output_hidden_statesuse_return_dictr   r  sizer/  r   r.  r   r   r  is_encoder_decoderr   r   get_seq_lengthr6   rV   rB   r   r   _update_causal_maskr   r3  r  rk   r$   invert_attention_maskget_head_maskr  r   	enumerater  r  rN   r   )%r   r  rd   r  r  r  	head_maskcross_attn_head_maskr%  r@  rA  r
  r  r  err_msg_prefixinput_shaper   rB  past_key_values_lengthmask_seq_lengthrK  encoder_batch_sizeencoder_sequence_length_encoder_hidden_shapeencoder_extended_attention_maskall_hidden_statesall_attentionsall_cross_attentionsr>  r  r   rR   layer_moduler?  r  layer_outputss%                                        r=   r   zLongT5Stack.forwardc  s     "+!6IDKK<Q<Q	1B1N-TXT_T_TqTq$8$D $++JjJj 	 &1%<k$++B]B] ]%>+/??ZN*>*:.HXXvw  "#..*K!r;r?;I&',,.s3K+/??ZN:>:J-XfWggtuvv&&4==##p "	 $$0p2pp0 --i8M!,
J??_4;;11&9$DKK8,dkk:Z'O '3$++&FO #OETE`!?!?!Afg!"\\&(>(KTaThThN !*B*D4zAO"ZZ
OML`L`aN??22o/BC  44$!K [[//7:3NDNNTaThThiK(K ??4@=R=W=W=Y: 7$68O#P %-).4HQ^QeQe)f&.2.H.HI_.`+.2+ &&y$++2H2HI	#112FH^H^_"6BD0d&7DOOrRV(,%]3(4 &	VOA|'lO)=a)@&#$58H$H!(%/- /+E /#"3'-M& *!,M
 *!,M#8#D0=CTaZ[0\- !/=3C2E!E??+?=QRCSBU+U(M&	VP --m<]3   1]4D D 
 "#%"(
 
 
 9+++%1
 	
r?   rd   r!   input_tensorr  r%  rA  c           	         | j                   j                  dk(  r||dk(  j                         r|S y | j                   j                  dk(  r't        |t        j
                        rt        |      }|S ||j                         nd}||j                  nd}| j                   j                  dk(  r(|s&|s$t        j                  |||| j                        ry |j                  }|j                  d   }	|r|j                         }
n1t        |t        j
                        r|j                  d	   n||	z   dz   }
| j                  ||	|
|||j                  d   
      }| j                   j                  dk(  rQ|O|j                   j"                  dv r7|s5t	        j$                  |      j&                  }t        j(                  ||      }|S )Nflash_attention_2ry   flex_attentionr   Fsdpa)r  r  is_trainingr   r,   )sequence_lengthtarget_lengthr*   r  r   )cudaxpunpu)r   _attn_implementationr  r   r6   r{   r"   r  is_compileabler   _ignore_causal_mask_sdpar.  r*   r3   get_max_cache_shape5_prepare_4d_causal_attention_mask_with_cache_positionrB   rp   r  r  _unmask_unattended)r   rd   r#  r  r%  rA  past_seen_tokensusing_compilable_cacher*   r)  r*  rK  	min_dtypes                r=   r  zLongT5Stack._update_causal_mask  s    ;;++/BB)~/D.I.I.K%%;;++/??.%,,7!<^!L!!
 @O?Z?99;`aCRC^!?!?di ;;++v5>T]n%>>*'7 MM	 ""&,,Q/!+??AM nell; $$R(%7!;  PP+')#))!, Q 
 KK,,6*%%**.DD%
 E*..I0CCKQZ[Kr?   r)  r*  r*   r   c                    | | j                         dk(  r| }|S t        j                  |      j                  }t        j                  ||f|||j
                        }|dk7  rt        j                  |d      }|t        j                  ||j
                        |j                  dd      kD  z  }|ddddddf   j                  |ddd      }| |j                         }| j                  d   }	|ddddddd|	f   | ddddddf   j                  |j
                        z   }
|
dk(  }
|ddddddd|	f   j                  |
|      |ddddddd|	f<   |S )	aM  
        Creates a causal 4D mask of shape `(batch_size, 1, query_length, key_value_length)` from a 2D mask of shape
        `(batch_size, key_value_length)`, or if the input `attention_mask` is already 4D, do nothing.

        Args:
            attention_mask (`torch.Tensor`):
                A 2D attention mask of shape `(batch_size, key_value_length)` or a 4D attention mask of shape
                `(batch_size, 1, query_length, key_value_length)`.
            sequence_length (`int`):
                The sequence length being processed.
            target_length (`int`):
                The target length: when generating with static cache, the mask should be as long as the static cache,
                to account for the 0 padding, the part of the cache that is not filled yet.
            dtype (`torch.dtype`):
                The dtype to use for the 4D attention mask.
            cache_position (`torch.Tensor`):
                Indices depicting the position of the input sequence tokens in the sequence.
            batch_size (`torch.Tensor`):
                Batch size.
        Nr  )
fill_valuer*   rB   r   )diagonalrw   r,   r   )r%   r6   r  r  r  rB   triurV   rD   expandr  r3   r`   masked_fill)rd   r)  r*  r*   r  r   r  rK  r6  mask_lengthpadding_masks              r=   r2  zALongT5Stack._prepare_4d_causal_attention_mask_with_cache_position[  s   > %.*<*<*>!*C(K* ' E*..I** -0Ye\j\q\qK !##jjqA5<<n>S>STWeWmWmnprsWtttK%dD!Q&67>>z1bRTUK))//1,2226*1aL[L+@ANSTVZ\`bcScDdDgDg&&E    ,q05@Aq,;,AV5W5c5c )6Aq!\k\12 r?   r   )NNNNNNNNNNNNNru  )r   r   r   r   r  r   r   r6   r{   r
   r9  r  rT  r   r*   r2  r   r   s   @r=   r  r  E  s    4+
 "#!!q
t #(BellK78B llB 	B
 B  BH 444 4 {{	4
 4 4 4r?   r  a_  
The input argument `head_mask` was split into two arguments `head_mask` and `decoder_head_mask`. Currently,
`decoder_head_mask` is set to copy `head_mask`, but this feature is deprecated and will be removed in future versions.
If you do not want to use any `decoder_head_mask` now, please set `decoder_head_mask = torch.ones(num_layers,
num_heads)`.
c            &       4    e Zd ZdgZddgZdef fdZd Zd Zd Z	d	 Z
d
 Ze	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddeej                     deej                      deej                     deej"                     deej                      deej                      deej$                     deeeej                            dee   deej$                     deej$                     dee   dee   dee   dee   deej                     deeej                      ef   f"d       Z xZS )r  Fdecoder.block.0.layer.1.EncDecAttention.relative_attention_bias.weightencoder.embed_tokens.weightdecoder.embed_tokens.weightr   c                    t         |   |       t        j                  |j                  |j
                        | _        t        j                  |      }d|_	        d|_
        d|_        t        || j                        | _        t        j                  |      }d|_	        d|_        |j                  |_        t        || j                        | _        | j#                          y )NFT)r   r   r   r   r  r   r  copydeepcopyr   r@  tie_encoder_decoderr  encodernum_decoder_layersr  decoderr  r   r   encoder_configdecoder_configr   s       r=   r   zLongT5Model.__init__  s     ll6#4#4fnnEv.$)!#( -2*">4;;?v.$(!-2*$*$=$=!">4;;? 	r?   c                     | j                   S r   r  r   s    r=   get_input_embeddingsz LongT5Model.get_input_embeddings      {{r?   c                 ~    || _         | j                  j                  |       | j                  j                  |       y r   r  rG  r  rI  r  s     r=   r  z LongT5Model.set_input_embeddings  -    $)).9)).9r?   c                     | j                   j                  ra| j                  | j                  j                  | j
                         | j                  | j                  j                  | j
                         y y r   r   r  _tie_or_clone_weightsrG  r  r  rI  rO  s    r=   _tie_weightszLongT5Model._tie_weights  P    ;;**&&t||'@'@$++N&&t||'@'@$++N +r?   c                     | j                   S r   rG  rO  s    r=   get_encoderzLongT5Model.get_encoder      ||r?   c                     |j                         D ]7  \  }}| j                  j                  |   j                  j	                  |       9 yz
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        NitemsrG  r  	attentionr  r   heads_to_pruner  r   s       r=   _prune_headszLongT5Model._prune_heads  E    
 +002 	CLE5LLu%//;;EB	Cr?   r  rd   r  r  r  decoder_head_maskr  encoder_outputsr%  r  decoder_inputs_embedsr@  rA  r
  r  r  r'   c                    ||n| j                   j                  }||n| j                   j                  }|O|M| j                   j                  | j                   j                  k(  r t        j                  t        t               |}|| j                  |||
||||      }nI|rGt        |t              s7t        |d   t        |      dkD  r|d   ndt        |      dkD  r|d   nd      }|d   }| j                  ||||	|||||||||      }|s||z   S t        |j                  |j                   |j"                  |j$                  |j&                  |j                  |j"                  |j$                        S )	ax  
        input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. LongT5 is a model with relative position embeddings so
            you should be able to pad the inputs on both the right and the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for detail.

            [What are input IDs?](../glossary#input-ids)

            To know more on how to prepare `input_ids` for pretraining take a look a [LONGT5
            Training](./longt5#training).
        decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
            Indices of decoder input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are decoder input IDs?](../glossary#decoder-input-ids)

            LONGT5 uses the `pad_token_id` as the starting token for `decoder_input_ids` generation. If
            `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see
            `past_key_values`).

            To know more on how to prepare `decoder_input_ids` for pretraining take a look at [LONGT5
            Training](./longt5#training).
        decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
            Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
            be used by default.
        decoder_head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules in the decoder. Mask values selected in `[0,
            1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        cross_attn_head_mask (`torch.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the cross-attention modules in the decoder. Mask values selected in
            `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, LongT5Model

        >>> tokenizer = AutoTokenizer.from_pretrained("google/long-t5-local-base")
        >>> model = LongT5Model.from_pretrained("google/long-t5-local-base")

        >>> # Let's try a very long encoder input.
        >>> input_ids = tokenizer(
        ...     100 * "Studies have been shown that owning a dog is good for you", return_tensors="pt"
        ... ).input_ids  # Batch size 1

        >>> decoder_input_ids = tokenizer("Studies show that", return_tensors="pt").input_ids  # Batch size 1

        >>> # forward pass
        >>> outputs = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids)
        >>> last_hidden_states = outputs.last_hidden_state
        ```Nr  rd   r  r  rA  r
  r  r   r   rf   r  r   r  r  rd   r  r%  r  r  r  r  r@  rA  r
  r  r  )r  r%  decoder_hidden_statesdecoder_attentionsr	  encoder_last_hidden_stater  encoder_attentions)r   r@  r  r  rH  warningswarn#_LongT5Model__HEAD_MASK_WARNING_MSGFutureWarningrG  r   r   r   rI  r   r  r%  r   r  r	  )r   r  rd   r  r  r  rg  r  rh  r%  r  ri  r@  rA  r
  r  r  r   decoder_outputss                      r=   r   zLongT5Model.forward  s   b "+!6IDKK<Q<Q	%0%<k$++B]B]  %6%>{{%%)G)GG5}E$-! ""ll#-+#"3%9' + O O_!M-"1!"4474H14Loa0RV14_1E1I?1-tO (* ,,'1/+"/#1'!5/!5#) ' 
  "_44!-??+;;"1"?"?.99,==&5&G&G"1"?"?.99	
 		
r?   )NNNNNNNNNNNNNNNN)r   r   r   "_keys_to_ignore_on_load_unexpected_tied_weights_keysr    r   rP  r  rX  r\  re  r   r   r6   
LongTensorFloatTensor
BoolTensorr{   rN   r
   r9  r   r   r   r   r   s   @r=   r  r    s    	R*& 89VW| &:
O
C  156:8<=A159=7;EI+/048<$(,0/3&*59#J
E,,-J
 !!2!23J
 $E$4$45	J

 !))9)9 :J
 E--.J
 $E$5$56J
 'u||4J
 "%e.?.?(@"ABJ
 "%J
  -J
  (5J
 D>J
 $D>J
 'tnJ
  d^!J
" !!1!12#J
$ 
uU&&');;	<%J
 J
r?   r  z>
    LONGT5 Model with a `language modeling` head on top.
    )custom_introc            (       n    e Zd ZdgZg dZdef fdZd Zd Zd Z	d Z
e	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 dd	eej                     d
eej                     deej                     deej                      deej                     deej                     deej"                     deeeej"                           dee   deej                     deej                     deej                     dee   dee   dee   dee   deej                     deeej                     ef   f$d       Zdej"                  fdZ xZS )r  r@  )rA  rB  zlm_head.weightr   c                 N   t         |   |       |j                  | _        t	        j
                  |j                  |j                        | _        t        j                  |      }d|_
        d|_        d|_        t        || j                        | _        t        j                  |      }d|_
        d|_        |j                  |_        t        || j                        | _        t	        j$                  |j                  |j                  d      | _        | j)                          y )NFTr   )r   r   r   	model_dimr   r   r  r  rD  rE  r   r@  rF  r  rG  rH  r  rI  r   r  r  rJ  s       r=   r   z'LongT5ForConditionalGeneration.__init__h  s     ll6#4#4fnnEv.$)!#( -2*">4;;?v.$(!-2*$*$=$=!">4;;?yy1B1BO 	r?   c                     | j                   S r   rN  rO  s    r=   rP  z3LongT5ForConditionalGeneration.get_input_embeddings  rQ  r?   c                 ~    || _         | j                  j                  |       | j                  j                  |       y r   rS  r  s     r=   r  z3LongT5ForConditionalGeneration.set_input_embeddings  rT  r?   c                     | j                   j                  ra| j                  | j                  j                  | j
                         | j                  | j                  j                  | j
                         y y r   rV  rO  s    r=   rX  z+LongT5ForConditionalGeneration._tie_weights  rY  r?   c                     | j                   S r   r[  rO  s    r=   r\  z*LongT5ForConditionalGeneration.get_encoder  r]  r?   r  rd   r  r  r  rg  r  rh  r%  r  ri  labelsr@  rA  r
  r  r  r'   c                 l   ||n| j                   j                  }||n| j                   j                  }|O|M| j                   j                  | j                   j                  k(  r t        j                  t        t               |}|| j                  |||
||||      }nI|rGt        |t              s7t        |d   t        |      dkD  r|d   ndt        |      dkD  r|d   nd      }|d   }|||| j                  |      }| j                  ||||	|||||||||      }|d   }| j                   j                  r|| j                   dz  z  }| j#                  |      }d}|^t%        d	
      }|j'                  |j(                        } ||j+                  d|j-                  d            |j+                  d            }|s|f|dd z   |z   }||f|z   S |S t/        |||j0                  |j2                  |j4                  |j6                  |j8                  |j2                  |j4                  	      S )a  
        input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. LongT5 is a model with relative position embeddings so
            you should be able to pad the inputs on both the right and the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for detail.

            [What are input IDs?](../glossary#input-ids)

            To know more on how to prepare `input_ids` for pretraining take a look a [LONGT5
            Training](./longt5#training).
        decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
            Indices of decoder input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are decoder input IDs?](../glossary#decoder-input-ids)

            LONGT5 uses the `pad_token_id` as the starting token for `decoder_input_ids` generation. If
            `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see
            `past_key_values`).

            To know more on how to prepare `decoder_input_ids` for pretraining take a look at [LONGT5
            Training](./longt5#training).
        decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
            Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
            be used by default.
        decoder_head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules in the decoder. Mask values selected in `[0,
            1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        cross_attn_head_mask (`torch.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the cross-attention modules in the decoder. Mask values selected in
            `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[-100, 0, ...,
            config.vocab_size - 1]`. All labels set to `-100` are ignored (masked), the loss is only computed for
            labels in `[0, ..., config.vocab_size]`

        Examples:

        ```python
        >>> from transformers import AutoTokenizer, LongT5ForConditionalGeneration

        >>> tokenizer = AutoTokenizer.from_pretrained("Stancld/longt5-tglobal-large-16384-pubmed-3k_steps")
        >>> model = LongT5ForConditionalGeneration.from_pretrained(
        ...     "Stancld/longt5-tglobal-large-16384-pubmed-3k_steps"
        ... )

        >>> # Let's try a very long input.
        >>> inputs = tokenizer(100 * "studies have shown that owning a dog is good for you ", return_tensors="pt")
        >>> input_ids = inputs.input_ids

        >>> outputs = model.generate(input_ids)
        >>> print(tokenizer.decode(outputs[0], skip_special_tokens=True))
        abstractthe aim of this article is to provide an overview of the literature on the role of dog
        ```Nrk  r   r   rf   rl  rm  r  r  )ignore_indexr,   )	losslogitsr%  rn  ro  r	  rp  r  rq  )r   r@  r  r  rH  rr  rs  6_LongT5ForConditionalGeneration__HEAD_MASK_WARNING_MSGru  rG  r   r   r   r  rI  r  r  r  r   r`   rB   r/  r  r   r%  r   r  r	  r  )r   r  rd   r  r  r  rg  r  rh  r%  r  ri  r  r@  rA  r
  r  r  r   rv  sequence_output	lm_logitsr  loss_fctoutputs                            r=   r   z&LongT5ForConditionalGeneration.forward  sl   j "+!6IDKK<Q<Q	%0%<k$++B]B]  %6%>{{%%)G)GG5}E$-! ""ll#-+#"3%9' + O O_!M-"1!"4474H14Loa0RV14_1E1I?1-tO (*"3";@U@] $ 1 1& 9 ,,'1/+"/#1'!5/!5#) ' 
  *!,;;** .1EFOLL1	'T:HYYy//0FINN2y~~b/ABFKKPROTD \OAB$77/IF)-)9TGf$EvE+;;"1"?"?.99,==&5&G&G"1"?"?.99

 
	
r?   c                 $    | j                  |      S r   )r  )r   r  s     r=   %prepare_decoder_input_ids_from_labelszDLongT5ForConditionalGeneration.prepare_decoder_input_ids_from_labels8  s      ((r?   )NNNNNNNNNNNNNNNNN)r   r   r   rw  rx  r    r   rP  r  rX  r\  r   r   r6   ry  rz  r{  r{   rN   r
   r9  r   r   r   r  r   r   s   @r=   r  r  ]  s    	R*& j| .:
O
  156:8<=A159=7;@D+/59=A-1$(,0/3&*59%f
E,,-f
 !!2!23f
 $E$4$45	f

 !))9)9 :f
 E--.f
 $E$5$56f
 'u||4f
 "%ell(;"<=f
 "%f
   1 12f
  ((9(9:f
 ))*f
 D>f
 $D>f
  'tn!f
" d^#f
$ !!1!12%f
& 
uU&&'8	9'f
 f
P)ELL )r?   r  c                   .    e Zd ZdgZdgZdef fdZd Zd Zd Z	d Z
d	 Ze	 	 	 	 	 	 	 dd
eej                     deej                      deej                      deej                      dee   dee   dee   deeej                      ef   fd       Z xZS )r  rA  rI  r   c                    t         |   |       t        j                  |j                  |j
                        | _        t        j                  |      }d|_	        d|_
        t        || j                        | _        | j                          y )NF)r   r   r   r   r  r   r  rD  rE  r@  rF  r  rG  r  )r   r   rK  r   s      r=   r   zLongT5EncoderModel.__init__A  sh     ll6#4#4fnnEv.#( -2*">4;;? 	r?   c                     | j                   S r   rN  rO  s    r=   rP  z'LongT5EncoderModel.get_input_embeddingsM  rQ  r?   c                 H    || _         | j                  j                  |       y r   )r  rG  r  r  s     r=   r  z'LongT5EncoderModel.set_input_embeddingsP  s    $)).9r?   c                     | j                   j                  r1| j                  | j                  j                  | j
                         y y r   )r   r  rW  rG  r  r  rO  s    r=   rX  zLongT5EncoderModel._tie_weightsT  s2    ;;**&&t||'@'@$++N +r?   c                     | j                   S r   r[  rO  s    r=   r\  zLongT5EncoderModel.get_encoderX  r]  r?   c                     |j                         D ]7  \  }}| j                  j                  |   j                  j	                  |       9 yr_  r`  rc  s       r=   re  zLongT5EncoderModel._prune_heads[  rf  r?   r  rd   r  r  rA  r
  r  r'   c           	      j    ||n| j                   j                  }| j                  |||||||      }|S )a  
        input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. LongT5 is a model with relative position embeddings so
            you should be able to pad the inputs on both the right and the left.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for detail.

            To know more on how to prepare `input_ids` for pretraining take a look a [LONGT5
            Training](./longt5#training).

        Example:

        ```python
        >>> from transformers import AutoTokenizer, LongT5ForConditionalGeneration

        >>> tokenizer = AutoTokenizer.from_pretrained("google/long-t5-local-base")
        >>> model = LongT5EncoderModel.from_pretrained("google/long-t5-local-base")
        >>> input_ids = tokenizer(
        ...     100 * "Studies have been shown that owning a dog is good for you ", return_tensors="pt"
        ... ).input_ids  # Batch size 1
        >>> outputs = model(input_ids=input_ids)
        >>> last_hidden_states = outputs.last_hidden_state
        ```rk  )r   r  rG  )	r   r  rd   r  r  rA  r
  r  rh  s	            r=   r   zLongT5EncoderModel.forwardc  sK    F &1%<k$++B]B],,)'/!5# ' 
 r?   )NNNNNNN)r   r   r   rx  rw  r    r   rP  r  rX  r\  re  r   r   r6   ry  rz  r9  r   rN   r   r   r   r   s   @r=   r  r  <  s   78*4&
| 
:OC  156:1559,0/3&*.E,,-. !!2!23. E--.	.
   1 12. $D>. 'tn. d^. 
uU&&'8	9. .r?   r  )r  r  r  r  )r   )\r  rD  r  rr  typingr   r   r   r6   r   torch.nnr   activationsr	   cache_utilsr
   r   r   
generationr   modeling_attn_mask_utilsr   modeling_layersr   modeling_outputsr   r   r   r   modeling_utilsr   pytorch_utilsr   r   utilsr   r   r   r   r   r   r   utils.deprecationr   configuration_longt5r    !torch.nn.attention.flex_attentionr!   integrations.flex_attentionr"   
get_loggerr   r   r{   r   r>   rG   rT   r\   rc   rB   rk   rN   r   r   r   Moduler   apex.normalizationr   infoImportError	Exceptionwarningr   r   r   r   rV  rx  r  r  r  r  r  r  r  __HEAD_MASK_WARNING_MSGr  r  r  __all__r-   r?   r=   <module>r     s       ' '   % ! C C ) > 9  . Q   1 .  !;J 
		H	%  3 3 W\WcWc  #%,, #3 #S #U\\ #4U\\ 4c 4 4Y\ 4ejeqeq 42!# !%,, !BU\\ Bc BV[VbVb B8ell 8s 8TYT`T` 8ejeqeq 8 .PLL.P58.P
5<<%&.Pb4U\\ 4VY 4^c^j^j 4	j<<	j,1LL	jJM	j
\\	j+bii +2	/"O
KKef")) ,ryy &BII &ebii eP299 DCRYY CN"ryy "JBII >bii D$		 $N]
, ]
@ ^!O ^! ^!BL' L`
  |
' |
 |
~ 
W)%:O W)
W)t U. U Up kk<  	 	
NN[\	s   3M M$M$#M$