
    h:                    @   d Z ddlZddlmZmZ ddlZddlZddlmZ ddlm	Z	m
Z
mZ ddlmZmZ ddlmZmZmZ dd	lmZ dd
lmZ ddlmZmZmZmZmZmZmZmZ ddl m!Z! ddl"m#Z#m$Z$m%Z% ddl&m'Z'm(Z( ddl)m*Z* ddl+m,Z,  e(jZ                  e.      Z/ G d dej`                        Z1 G d dej`                        Z2 G d dej`                        Z3 G d dej`                        Z4 G d dej`                        Z5 G d dej`                        Z6 G d dej`                        Z7 G d  d!e      Z8 G d" d#ej`                        Z9 G d$ d%ej`                        Z:e' G d& d'e!             Z; e'd()       G d* d+e;             Z< e'd,)       G d- d.e;e             Z=e' G d/ d0e;             Z> G d1 d2ej`                        Z? e'd3)       G d4 d5e;             Z@e' G d6 d7e;             ZAe' G d8 d9e;             ZB G d: d;ej`                        ZCe' G d< d=e;             ZDd@d>ZEg d?ZFy)AzPyTorch X-MOD model.    N)OptionalUnion)nn)BCEWithLogitsLossCrossEntropyLossMSELoss   )ACT2FNgelu)CacheDynamicCacheEncoderDecoderCache)GenerationMixin)GradientCheckpointingLayer))BaseModelOutputWithPastAndCrossAttentions,BaseModelOutputWithPoolingAndCrossAttentions!CausalLMOutputWithCrossAttentionsMaskedLMOutputMultipleChoiceModelOutputQuestionAnsweringModelOutputSequenceClassifierOutputTokenClassifierOutput)PreTrainedModel)apply_chunking_to_forward find_pruneable_heads_and_indicesprune_linear_layer)auto_docstringlogging)deprecate_kwarg   )
XmodConfigc                   2     e Zd ZdZ fdZ	 ddZd Z xZS )XmodEmbeddingszV
    Same as BertEmbeddings with a tiny tweak for positional embeddings indexing.
    c                    t         |           t        j                  |j                  |j
                  |j                        | _        t        j                  |j                  |j
                        | _	        t        j                  |j                  |j
                        | _        t        j                  |j
                  |j                        | _        t        j                  |j                        | _        t#        |dd      | _        | j'                  dt)        j*                  |j                        j-                  d      d       | j'                  d	t)        j.                  | j0                  j3                         t(        j4                  
      d       |j                  | _        t        j                  |j                  |j
                  | j6                        | _	        y )N)padding_idxepsposition_embedding_typeabsoluteposition_ids)r    F)
persistenttoken_type_idsdtype)super__init__r   	Embedding
vocab_sizehidden_sizepad_token_idword_embeddingsmax_position_embeddingsposition_embeddingstype_vocab_sizetoken_type_embeddings	LayerNormlayer_norm_epsDropouthidden_dropout_probdropoutgetattrr(   register_buffertorcharangeexpandzerosr*   sizelongr%   selfconfig	__class__s     d/var/www/html/eduruby.in/venv/lib/python3.12/site-packages/transformers/models/xmod/modeling_xmod.pyr1   zXmodEmbeddings.__init__8   si   !||F,=,=v?Q?Q_e_r_rs#%<<0N0NPVPbPb#c %'\\&2H2H&J\J\%]" f&8&8f>S>STzz&"<"<='.v7PR\']$ELL)G)GHOOPWXej 	 	
 	ekk$*;*;*@*@*B%**Ubg 	 	

 "..#%<<**F,>,>DL\L\$
     c                    |+|t        || j                  |      }n| j                  |      }||j                         }n|j                         d d }|d   }|st	        | d      r-| j
                  d d d |f   }|j                  |d   |      }	|	}n:t        j                  |t        j                  | j                  j                        }|| j                  |      }| j                  |      }
||
z   }| j                  dk(  r| j                  |      }||z  }| j!                  |      }| j#                  |      }|S )Nr+   r    r-   r   r/   devicer)   )"create_position_ids_from_input_idsr%   &create_position_ids_from_inputs_embedsrF   hasattrr-   rD   rB   rE   rG   r*   rP   r6   r:   r(   r8   r;   r?   )rI   	input_idsr-   r*   inputs_embedspast_key_values_lengthinput_shape
seq_lengthbuffered_token_type_ids buffered_token_type_ids_expandedr:   
embeddingsr8   s                rL   forwardzXmodEmbeddings.forwardQ   sR    $A)TM]M]_uv#JJ=Y #..*K',,.s3K ^

 !t-.*.*=*=a*n*M'3J3Q3QR]^_R`bl3m0!A!&[

SWSdSdSkSk!l  00;M $ : :> J"%::
'':5"&":":<"H--J^^J/
\\*-
rM   c                    |j                         dd }|d   }t        j                  | j                  dz   || j                  z   dz   t        j                  |j
                        }|j                  d      j                  |      S )z
        We are provided embeddings directly. We cannot infer which are padded so just generate sequential position ids.

        Args:
            inputs_embeds: torch.Tensor

        Returns: torch.Tensor
        Nr+   r    rO   r   )rF   rB   rC   r%   rG   rP   	unsqueezerD   )rI   rU   rW   sequence_lengthr*   s        rL   rR   z5XmodEmbeddings.create_position_ids_from_inputs_embedsy   s     $((*3B/%a.||q /D4D4D"Dq"HPUPZPZcpcwcw
 %%a(//<<rM   )NNNNr   )__name__
__module____qualname____doc__r1   r\   rR   __classcell__rK   s   @rL   r#   r#   2   s    

4 rs&P=rM   r#   c                       e Zd Zd fd	Z eddd      	 	 	 	 	 	 ddej                  deej                     deej                     d	eej                     dee	   d
ee
   deej                     deej                     fd       Z xZS )XmodSelfAttentionc                    t         |           |j                  |j                  z  dk7  r2t	        |d      s&t        d|j                   d|j                   d      |j                  | _        t        |j                  |j                  z        | _        | j                  | j                  z  | _        t        j                  |j                  | j                        | _        t        j                  |j                  | j                        | _        t        j                  |j                  | j                        | _        t        j                  |j                        | _        |xs t#        |dd      | _        | j$                  dk(  s| j$                  d	k(  rF|j&                  | _        t        j(                  d
|j&                  z  dz
  | j                        | _        |j,                  | _        || _        y )Nr   embedding_sizezThe hidden size (z6) is not a multiple of the number of attention heads ()r(   r)   relative_keyrelative_key_query   r    )r0   r1   r4   num_attention_headsrS   
ValueErrorintattention_head_sizeall_head_sizer   Linearquerykeyvaluer=   attention_probs_dropout_probr?   r@   r(   r7   r2   distance_embedding
is_decoder	layer_idxrI   rJ   r(   rz   rK   s       rL   r1   zXmodSelfAttention.__init__   s    : ::a?PVXhHi#F$6$6#7 8 445Q8 
 $*#=#= #&v'9'9F<V<V'V#W !558P8PPYYv1143E3EF
99V//1C1CDYYv1143E3EF
zz&"E"EF'> (
'-zC
$ ''>9T=Y=Y]q=q+1+I+ID(&(ll1v7U7U3UXY3Y[_[s[s&tD# ++"rM   past_key_valuepast_key_values4.58new_nameversionhidden_statesattention_mask	head_maskencoder_hidden_statesoutput_attentionscache_positionreturnc                 	   |j                   \  }}	}
| j                  |      }|j                  |d| j                  | j                        j                  dd      }|d u}|St        |t              rA|j                  j                  | j                        }|r|j                  }n|j                  }n|}|r|n|}|rK|IrGj                  | j                     j                  }|j                  | j                     j                  }n| j!                  |      }|j                  |d| j                  | j                        j                  dd      }| j#                  |      }|j                  |d| j                  | j                        j                  dd      }|D|s|nd }j%                  ||| j                  d|i      \  }}|rd|j                  | j                  <   t'        j(                  ||j                  dd            }| j*                  dk(  s| j*                  dk(  r|j                   d   |j                   d   }}|Dt'        j,                  |dz
  t&        j.                  |j0                  	      j                  dd      }n@t'        j2                  |t&        j.                  |j0                  	      j                  dd      }t'        j2                  |t&        j.                  |j0                  	      j                  dd      }||z
  }| j5                  || j6                  z   dz
        }|j9                  |j:                  
      }| j*                  dk(  rt'        j<                  d||      }||z   }nE| j*                  dk(  r6t'        j<                  d||      }t'        j<                  d||      }||z   |z   }|t?        j@                  | j                        z  }|||z   }tB        jD                  jG                  |d      }| jI                  |      }|||z  }t'        j(                  ||      }|jK                  dddd      jM                         }|jO                         d d | jP                  fz   }|j                  |      }||fS )Nr+   r    rm   r   Trk   rl   rO   r.   zbhld,lrd->bhlrzbhrd,lrd->bhlrdimr   r	   ))shapert   viewrn   rq   	transpose
isinstancer   
is_updatedgetrz   cross_attention_cacheself_attention_cachelayerskeysvaluesru   rv   updaterB   matmulr(   tensorrG   rP   rC   rx   r7   tor/   einsummathsqrtr   
functionalsoftmaxr?   permute
contiguousrF   rr   )rI   r   r   r   r   r}   r   r   
batch_sizerX   _query_layeris_cross_attentionr   curr_past_key_valuecurrent_states	key_layervalue_layerattention_scoresquery_length
key_lengthposition_ids_lposition_ids_rdistancepositional_embeddingrelative_position_scoresrelative_position_scores_queryrelative_position_scores_keyattention_probscontext_layernew_context_layer_shapes                                  rL   r\   zXmodSelfAttention.forward   sN    %2$7$7!
Jjj/!&&z2t7O7OQUQiQijttq
 3$>&/+>?,77;;DNNK
%*9*O*O'*9*N*N'&5#2D.-/"=*+224>>BGGI-44T^^DKKK0I!z2t7O7OQUQiQijtt1I **^4K%**B 8 8$:R:Ri1o  *7It)<)C)C{DNN=M~<^*&	; &AEO..t~~> !<<Y5H5HR5PQ''>9T=Y=Y]q=q'2'8'8';Y__Q=O*L*!&j1nEJJWdWkWk!l!q!q" "'l%**UbUiUi!j!o!oprtu!v"\\*EJJ}OcOcdiijkmopN%6H#'#:#:8dFbFb;bef;f#g #7#:#:ARAR#:#S ++~=+0<<8H+Wk+l(#36N#N --1EE16>NP[]q1r./4||<LiYm/n,#36T#TWs#s +dii8P8P.QQ%/.@ --//0@b/I ,,7  -	9O_kB%--aAq9DDF"/"4"4"6s";t?Q?Q>S"S%**+BCo--rM   NNNNNNFN)r`   ra   rb   r1   r   rB   Tensorr   FloatTensorr   booltupler\   rd   re   s   @rL   rg   rg      s    #6 %0A6R 7;15=A+/,115d.||d. !!2!23d. E--.	d.
  ((9(9:d. "%d. $D>d. !.d. 
u||	d. Sd.rM   rg   c                   n     e Zd Z fdZdej
                  dej
                  dej
                  fdZ xZS )XmodSelfOutputc                 (   t         |           t        j                  |j                  |j                        | _        t        j                  |j                  |j                        | _        t        j                  |j                        | _
        y Nr&   )r0   r1   r   rs   r4   denser;   r<   r=   r>   r?   rH   s     rL   r1   zXmodSelfOutput.__init__  s`    YYv1163E3EF
f&8&8f>S>STzz&"<"<=rM   r   input_tensorr   c                 T    | j                  |      }| j                  |      }||z   }|S N)r   r?   )rI   r   r   s      rL   r\   zXmodSelfOutput.forward  s.    

=1]3%4rM   r`   ra   rb   r1   rB   r   r\   rd   re   s   @rL   r   r     s1    >U\\  RWR^R^ rM   r   c                       e Zd Zd fd	Zd Z eddd      	 	 	 	 	 	 ddej                  deej                     d	eej                     d
eej                     dee
   dee   deej                     deej                     fd       Z xZS )XmodAttentionc                     t         |           t        |||      | _        t	        |      | _        t               | _        |j                  | _        y )Nr(   rz   )	r0   r1   rg   rI   r   outputsetpruned_headspre_normr{   s       rL   r1   zXmodAttention.__init__   sA    %fF]irs	$V,ErM   c                 >   t        |      dk(  ry t        || j                  j                  | j                  j                  | j
                        \  }}t        | j                  j                  |      | j                  _        t        | j                  j                  |      | j                  _        t        | j                  j                  |      | j                  _	        t        | j                  j                  |d      | j                  _        | j                  j                  t        |      z
  | j                  _        | j                  j                  | j                  j                  z  | j                  _        | j
                  j                  |      | _        y )Nr   r    r   )lenr   rI   rn   rq   r   r   rt   ru   rv   r   r   rr   union)rI   headsindexs      rL   prune_headszXmodAttention.prune_heads(  s   u:?749900$))2O2OQUQbQb
u
 -TYY__eD		*499==%@		,TYY__eD		.t{{/@/@%QO )-		(E(EE
(R		%"&))"?"?$))B_B_"_		 --33E:rM   r|   r}   r~   r   r   r   r   r   r   r   r   c           	         |}| j                   r| j                  j                  |      }| j                  |||||||      }	| j                  |	d   |      }
| j                   s| j                  j                  |
      }
|
f|	dd  z   }|S )Nr   r    )r   r   r;   rI   )rI   r   r   r   r   r}   r   r   residualself_outputsattention_outputoutputss               rL   r\   zXmodAttention.forward:  s     !== KK11-@Myy!
  ;;|AA}}#{{445EF#%QR(88rM   r   r   )r`   ra   rb   r1   r   r   rB   r   r   r   r   r   r   r\   rd   re   s   @rL   r   r     s    (;$ %0A6R 7;15=A+/,115|| !!2!23 E--.	
  ((9(9: "% $D> !. 
u||	 SrM   r   c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )XmodIntermediatec                    t         |           t        j                  |j                  |j
                        | _        t        |j                  t              rt        |j                     | _        y |j                  | _        y r   )r0   r1   r   rs   r4   intermediate_sizer   r   
hidden_actstrr
   intermediate_act_fnrH   s     rL   r1   zXmodIntermediate.__init__Z  s]    YYv1163K3KL
f''-'-f.?.?'@D$'-'8'8D$rM   r   r   c                 J    | j                  |      }| j                  |      }|S r   )r   r   rI   r   s     rL   r\   zXmodIntermediate.forwardb  s&    

=100?rM   r   re   s   @rL   r   r   Y  s#    9U\\ ell rM   r   c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )XmodAdapterc                    t         |           |j                  |j                  z  | _        t        j                  |j                  | j                        | _        t        j                  | j                  |j                        | _        t        |j                  t              rt        |j                     | _        y |j                  | _        y r   )r0   r1   r4   adapter_reduction_factorbottleneck_sizer   rs   dense1dense2r   r   r   r
   adapter_act_fnrH   s     rL   r1   zXmodAdapter.__init__i  s    %11V5T5TTii 2 2D4H4HIii 4 4f6H6HIf''-"():):";D"("3"3DrM   r   r   c                 l    | j                  |      }| j                  |      }| j                  |      }|S r   )r   r   r   r   s     rL   r\   zXmodAdapter.forwards  s4    M2++M:M2rM   r   re   s   @rL   r   r   h  s#    4U\\ ell rM   r   c                        e Zd Z fdZdej
                  dej
                  dej
                  dej
                  fdZdej
                  dej
                  fdZ xZS )
XmodOutputc                    t         |           t        j                  |j                  |j
                        | _        t        j                  |j
                  |j                        | _        |j                  | _	        t        j                  |j                        | _        |j                  r1t        j                  |j
                  |j                        | _        nd | _        |j                  | _        t        j                  i       | _        |j"                  D ]#  }t%        |      | j                   t'        |      <   % y r   )r0   r1   r   rs   r   r4   r   r;   r<   ln_before_adapterr=   r>   r?   adapter_layer_normadapter_reuse_layer_norm
ModuleDictadapter_modules	languagesr   r   )rI   rJ   languagerK   s      rL   r1   zXmodOutput.__init__{  s    YYv779K9KL
f&8&8f>S>ST!'!9!9zz&"<"<=$$&(ll63E3E6K`K`&aD#&*D#(.(G(G%!}}R0(( 	FH2=f2ED  X/	FrM   r   r   lang_idsr   c                 x    | j                  |      }| j                  |      }||z   }| j                  ||      }|S r   )r   r?   lang_adapter)rI   r   r   r   s       rL   r\   zXmodOutput.forward  s@    

=1]3%4))(MBrM   c                    t        j                  |d      \  }}| j                  s|}| j                  | j                  |      }n| j                  r| j                  |      }| j                  r|}t        j                  ||j                         d      }g }t        t        ||            D ]i  \  }\  }}	t        | j                  j                               t        |j                                  }
|j                   | j                  |
   |	             k t        j                   |d      }| j#                  |      }|z  }|S )NT)return_countsr   )rB   unique_consecutiver   r   r   r;   splittolist	enumerateziplistr   r   rp   itemappendcatr?   )rI   r   r   lang_lengthsr   split_hidden_stateslang_wise_outputsilang_idsplit_hidden_statelangs              rL   r   zXmodOutput.lang_adapter  s1   !&!9!9(RV!W,%%$H"". 33MBM** NN=9M!!$H#kk-9L9L9NPQR09#hH[:\0] 	U,A,+,,1134S5HID$$%?T%9%9$%?@R%ST	U 		"3Q7]3!rM   )	r`   ra   rb   r1   rB   r   r\   r   rd   re   s   @rL   r   r   z  s[    FU\\  Y^YeYe jojvjv U\\ %,, rM   r   c                   R    e Zd Zd fd	Z eddd      	 	 	 	 	 	 	 ddej                  dej                  deej                     d	eej                     d
eej                     deej                     dee	   dee
   deej                     deej                     fd       Zd Z xZS )	XmodLayerc                    t         |           |j                  | _        d| _        t	        ||      | _        |j                  | _        |j                  | _        | j                  r-| j                  st        |  d      t	        |d|      | _	        t        |      | _        t        |      | _        |j                  | _        y )Nr    rz   z> should be used as a decoder model if cross attention is addedr)   r   )r0   r1   chunk_size_feed_forwardseq_len_dimr   	attentionry   add_cross_attentionro   crossattentionr   intermediater   r   r   )rI   rJ   rz   rK   s      rL   r1   zXmodLayer.__init__  s    '-'E'E$&vC ++#)#=#= ##?? D6)g!hii"/PZfo"pD,V4 (rM   r|   r}   r~   r   r   r   r   r   r   encoder_attention_maskr   r   r   c
           	      
   | j                  ||||||	      }
|
d   }|
dd  }| j                  rB|@t        | d      st        d|  d      | j	                  |||||||	      }|d   }||dd  z   }|}| j
                  r| j                  j                  |      }t        | j                  | j                  | j                  |      }| j                  |||      }| j
                  s| j                  j                  |      }|f|z   S )N)r   r   r   r}   r   r   r    r  z'If `encoder_hidden_states` are passed, z` has to be instantiated with cross-attention layers by setting `config.add_cross_attention=True`)r   r   r   r}   r   r   )r  ry   rS   ro   r  r   r   r;   r   feed_forward_chunkr  r  )rI   r   r   r   r   r   r  r}   r   r   self_attention_outputsr   r   cross_attention_outputsr   intermediate_outputlayer_outputs                    rL   r\   zXmodLayer.forward  sP    "&)/+) "0 "
 2!4(,??4@4!12 =dV DD D 
 '+&9&9 5#&; /"3- ': '#  7q9 7 ;;G#==#{{445EF7##((	
 {{#6(K}};;00>L((rM   c                 $    | j                  |      S r   )r  )rI   r   s     rL   r  zXmodLayer.feed_forward_chunk  s      !122rM   r   )NNNNNFN)r`   ra   rb   r1   r   rB   r   r   r   r   r   r   r\   r  rd   re   s   @rL   r  r    s    ( %0A6R
 7;15=A>B+/,1156)||6) ,,6) !!2!23	6)
 E--.6)  ((9(9:6) !)):): ;6) "%6) $D>6) !.6) 
u||	6) S6)p3rM   r  c                   |    e Zd Z fdZ	 	 	 	 	 	 	 	 	 	 ddej
                  dej
                  deej                     deej                     deej                     deej                     deeeej                           d	ee	   d
ee	   dee	   dee	   deej
                     de
eej
                     ef   fdZ xZS )XmodEncoderc           	      n   t         |           || _        t        j                  t        |j                        D cg c]  }t        ||       c}      | _        |j                  | _
        | j                  r0t        j                  |j                  |j                        | _        d| _        y c c}w )Nr  r&   F)r0   r1   rJ   r   
ModuleListrangenum_hidden_layersr  layerr   is_pre_normr;   r4   r<   gradient_checkpointing)rI   rJ   r  rK   s      rL   r1   zXmodEncoder.__init__  s    ]]ERXRjRjLk#lqIf$B#lm
!??\\&*<*<&BWBWXDN&+#	 $ms   B2r   r   r   r   r   r  r}   	use_cacher   output_hidden_statesreturn_dictr   r   c                    | j                   r%| j                  r|rt        j                  d       d}|r6|4t	        t        | j                        t        | j                              }|r:t        |t              r*t        j                  d       t	        j                  |      }|
rdnd }|	rdnd }|	r| j                  j                  rdnd }t        | j                        D ]W  \  }}|
r||fz   }|||   nd } |||||||||	|	      }|d   }|	s/||d   fz   }| j                  j                  sO||d   fz   }Y | j                  r| j                  |      }|
r||fz   }|st        d	 |||||fD              S t        |||||
      S )NzZ`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...F)rJ   zPassing a tuple of `past_key_values` is deprecated and will be removed in Transformers v4.58.0. You should pass an instance of `EncoderDecoderCache` instead, e.g. `past_key_values=EncoderDecoderCache.from_legacy_cache(past_key_values)`. r   r    rm   c              3   $   K   | ]  }|| 
 y wr   r*  ).0vs     rL   	<genexpr>z&XmodEncoder.forward.<locals>.<genexpr>F  s      
 = 
s   )last_hidden_stater}   r   
attentionscross_attentions)r%  trainingloggerwarning_oncer   r   rJ   r   r   from_legacy_cacher  r   r#  r$  r;   r   )rI   r   r   r   r   r   r  r}   r&  r   r'  r(  r   all_hidden_statesall_self_attentionsall_cross_attentionsr  layer_modulelayer_head_masklayer_outputss                       rL   r\   zXmodEncoder.forward  s    &&4==##p "	01,dkk2RT`hlhshsTtuOOU;\
 2CCOTO"6BD$5b4%64;;;Z;Zr`d(4 	VOA|#$58H$H!.7.CilO(%&!
M *!,M &9]1=M<O&O#;;22+?=QRCSBU+U(-	V0  NN=9M 1]4D D 
 "#%'(
 
 
 9+++*1
 	
rM   )
NNNNNNFFTN)r`   ra   rb   r1   rB   r   r   r   r   r   r   r   r\   rd   re   s   @rL   r  r    s1   , 7;15=A>BEI$(,1/4&*15T
||T
 ,,T
 !!2!23	T

 E--.T
  ((9(9:T
 !)):): ;T
 "%e.?.?(@"ABT
 D>T
 $D>T
 'tnT
 d^T
 !.T
 
uU\\"$MM	NT
rM   r  c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )
XmodPoolerc                     t         |           t        j                  |j                  |j                        | _        t        j                         | _        y r   )r0   r1   r   rs   r4   r   Tanh
activationrH   s     rL   r1   zXmodPooler.__init__\  s9    YYv1163E3EF
'')rM   r   r   c                 \    |d d df   }| j                  |      }| j                  |      }|S Nr   )r   r@  )rI   r   first_token_tensorpooled_outputs       rL   r\   zXmodPooler.forwarda  s6     +1a40

#566rM   r   re   s   @rL   r=  r=  [  s#    $
U\\ ell rM   r=  c                   8    e Zd ZU eed<   dZdZd ZdefdZ	d Z
y)	XmodPreTrainedModelrJ   robertaTc                 l   t        |t        j                        rm|j                  j                  j                  d| j                  j                         |j                  %|j                  j                  j                          yyt        |t        j                        rz|j                  j                  j                  d| j                  j                         |j                  2|j                  j                  |j                     j                          yyt        |t        j                        rJ|j                  j                  j                          |j                  j                  j                  d       yt        |t              r%|j                  j                  j                          yy)zInitialize the weightsg        )meanstdNg      ?)r   r   rs   weightdatanormal_rJ   initializer_rangebiaszero_r2   r%   r;   fill_
XmodLMHead)rI   modules     rL   _init_weightsz!XmodPreTrainedModel._init_weightsq  s&   fbii( MM&&CT[[5R5R&S{{&  &&( '-MM&&CT[[5R5R&S!!-""6#5#56<<> .-KK""$MM$$S)
+KK""$ ,rM   r   c           	          || j                   j                  vr0t        |  d| dt        | j                   j                               || j                   _        y)z
        Set the default language code for the model. This is used when the language is not specified in the input.

        Args:
            language (`str`): The language code, such as `"en_XX"` or `"de_DE"`.
        z does not have an adapter for z. Supported languages: N)rJ   r   ro   r   default_language)rI   r   s     rL   set_default_languagez(XmodPreTrainedModel.set_default_language  s[     4;;000&6xj@WX\]a]h]h]r]rXsWtu  (0$rM   c                    t         j                  d       | j                  j                  j	                         D ]	  }d|_         t         j                  d       | j                  j                  j                  D ]x  }|j                  j                  0|j                  j                  j	                         D ]	  }d|_         |j                  j                  j	                         D ]	  }d|_         z y)z
        Freeze the embeddings and language adapters of the model. Usually, this is applied before the model is
        fine-tuned on a downstream task.
        zFreezing embeddingsFzFreezing adaptersN)r3  inforG  r[   
parametersrequires_gradencoderr#  r   r   r   )rI   	parameterr#  s      rL   'freeze_embeddings_and_language_adaptersz;XmodPreTrainedModel.freeze_embeddings_and_language_adapters  s    
 	)*00;;= 	,I&+I#	,'(\\))// 	0E||..:!&!@!@!K!K!M 4I.3I+4"\\99DDF 0	*/	'0		0rM   N)r`   ra   rb   r!   __annotations__base_model_prefixsupports_gradient_checkpointingrT  r   rW  r^  r*  rM   rL   rF  rF  j  s*    !&*#%$0S 00rM   rF  a0  
    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in *Attention is
    all you need*_ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N. Gomez, Lukasz
    Kaiser and Illia Polosukhin.

    To behave as an decoder the model needs to be initialized with the `is_decoder` argument of the configuration set
    to `True`. To be used in a Seq2Seq model, the model needs to initialized with both `is_decoder` argument and
    `add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.

    .. _*Attention is all you need*: https://huggingface.co/papers/1706.03762
    )custom_introc            $           e Zd Zd fd	Zd Zd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     dee	j                     deee	j                        dee   dee   dee   dee   dee	j                     deee	j                     ef   f d       Z xZS )	XmodModelc                     t         |   |       || _        t        |      | _        t        |      | _        |rt        |      nd| _        | j                          y)zv
        add_pooling_layer (bool, *optional*, defaults to `True`):
            Whether to add a pooling layer
        N)
r0   r1   rJ   r#   r[   r  r\  r=  pooler	post_init)rI   rJ   add_pooling_layerrK   s      rL   r1   zXmodModel.__init__  sM    
 	 (0"6*,=j(4 	rM   c                 .    | j                   j                  S r   r[   r6   rI   s    rL   get_input_embeddingszXmodModel.get_input_embeddings  s    ...rM   c                 &    || j                   _        y r   rj  )rI   rv   s     rL   set_input_embeddingszXmodModel.set_input_embeddings  s    */'rM   c                     |j                         D ]7  \  }}| j                  j                  |   j                  j	                  |       9 y)z
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        N)itemsr\  r#  r  r   )rI   heads_to_pruner#  r   s       rL   _prune_headszXmodModel._prune_heads  sE    
 +002 	CLE5LLu%//;;EB	CrM   rT   r   r   r-   r*   r   rU   r   r  r}   r&  r   r'  r(  r   r   c                 ,   ||n| j                   j                  }||n| j                   j                  }||n| j                   j                  }| j                   j                  r||n| j                   j
                  }nd}||t        d      |#| j                  ||       |j                         }n!||j                         dd }nt        d      |\  }}||j                  n|j                  }d}|
5t        |
t              s|
d   d   j                  d   n|
j                         }|| j                   j                  t        d      t        | j                   j"                  d   j$                  j&                  j)                               }|j+                  | j                   j                        }|t-        j.                  ||	      z  }|t-        j.                  |||z   f|	      }|pt1        | j2                  d
      r4| j2                  j4                  ddd|f   }|j7                  ||      }|}n&t-        j8                  |t,        j:                  |      }| j=                  ||      }| j                   j                  rE|C|j                         \  }}}||f}|	t-        j.                  ||	      }	| j?                  |	      }nd}| jA                  || j                   jB                        }| j3                  |||||      }| j!                  |||||||
|||||      } | d   }!| jD                  | jE                  |!      nd}"|s
|!|"f| dd z   S tG        |!|"| jH                  | jJ                  | jL                  | jN                        S )  
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        NFzDYou cannot specify both input_ids and inputs_embeds at the same timer+   z5You have to specify either input_ids or inputs_embedsr   r   zPInput language unknown. Please call `XmodPreTrainedModel.set_default_language()`)rP   r-   rO   )rT   r*   r-   rU   rV   )r   r   r   r   r  r}   r&  r   r'  r(  r   r    )r/  pooler_outputr}   r   r0  r1  )(rJ   r   r'  use_return_dictry   r&  ro   %warn_if_padding_and_no_attention_maskrF   rP   r   r   r   get_seq_lengthrV  r   r\  r#  r   r   r   r   rB   onesrS   r[   r-   rD   rE   rG   get_extended_attention_maskinvert_attention_maskget_head_maskr"  rf  r   r}   r   r0  r1  )#rI   rT   r   r   r-   r*   r   rU   r   r  r}   r&  r   r'  r(  r   rW   r   rX   rP   rV   adapter_languagesdefault_lang_idrY   rZ   extended_attention_maskencoder_batch_sizeencoder_sequence_lengthr   encoder_hidden_shapeencoder_extended_attention_maskembedding_outputencoder_outputssequence_outputrD  s#                                      rL   r\   zXmodModel.forward  s   0 2C1N-TXT_T_TqTq$8$D $++JjJj 	 &1%<k$++B]B];;!!%.%:	@U@UII ]%>cdd"66y.Q#..*K&',,.s3KTUU!,
J%.%:!!@T@T!"& "/59  "1%++B/$335 # {{++3 !stt $T\\%7%7%:%A%A%Q%Q%V%V%X Y/55dkk6R6RSO&Jv)NNH!"ZZ*jCY6Y)ZdjkN!t(89*.//*H*HKZK*X'3J3Q3QR\^h3i0!A!&[

SY!Z 150P0PQ_al0m ;;!!&;&G=R=W=W=Y: 7$68O#P %-).4HQW)X&.2.H.HI_.`+.2+ &&y$++2O2OP	??%)'#9 + 
 ,,2"7#B+/!5#) ' 
 *!,8<8OO4UY#]3oab6III;-'+;;)77&11,==
 	
rM   )T)NNNNNNNNNNNNNNN)r`   ra   rb   r1   rl  rn  rr  r   r   rB   r   
LongTensorr   r   r   r   r   r   r\   rd   re   s   @rL   rd  rd    s    "/0C  -1/31515/3,0048<9==A$(,0/3&*15!A
ELL)A
 5++,A
 !.	A

 !.A
 u||,A
 ELL)A
  -A
  (5A
 !) 6A
 "$u'8'8"9:A
 D>A
 $D>A
 'tnA
 d^A
  !.!A
" 
uU\\"$PP	Q#A
 A
rM   rd  zQ
    X-MOD Model with a `language modeling` head on top for CLM fine-tuning.
    c            &       *    e Zd ZddgZ fdZd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     dee	j                     dee	j                     dee	j                     deeee	j                           dee   dee   dee   dee   dee	j                     deee	j                     ef   f"d       Z xZS )XmodForCausalLMlm_head.decoder.weightlm_head.decoder.biasc                     t         |   |       |j                  st        j	                  d       t        |d      | _        t        |      | _        | j                          y )NzLIf you want to use `XmodLMHeadModel` as a standalone, add `is_decoder=True.`Frh  
r0   r1   ry   r3  warningrd  rG  rR  lm_headrg  rH   s     rL   r1   zXmodForCausalLM.__init__`  sL       NNij 5A!&) 	rM   c                 .    | j                   j                  S r   r  decoderrk  s    rL   get_output_embeddingsz%XmodForCausalLM.get_output_embeddingsm      ||###rM   c                 &    || j                   _        y r   r  rI   new_embeddingss     rL   set_output_embeddingsz%XmodForCausalLM.set_output_embeddingsq      -rM   rT   r   r   r-   r*   r   rU   r   r  labelsr}   r&  r   r'  r(  r   r   c                    ||n| j                   j                  }|
d}| j                  |||||||||	||||||      }|d   }| j                  |      }d}|
* | j                  ||
fd| j                   j
                  i|}|s|f|dd z   }||f|z   S |S t        |||j                  |j                  |j                  |j                        S )aS  
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
            `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are
            ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`

        Example:

        ```python
        >>> from transformers import AutoTokenizer, XmodForCausalLM, AutoConfig
        >>> import torch

        >>> tokenizer = AutoTokenizer.from_pretrained("FacebookAI/xlm-roberta-base")
        >>> config = AutoConfig.from_pretrained("facebook/xmod-base")
        >>> config.is_decoder = True
        >>> model = XmodForCausalLM.from_pretrained("facebook/xmod-base", config=config)
        >>> model.set_default_language("en_XX")

        >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> prediction_logits = outputs.logits
        ```NF)r   r   r-   r*   r   rU   r   r  r}   r&  r   r'  r(  r   r   r3   rm   )losslogitsr}   r   r0  r1  )rJ   rv  rG  r  loss_functionr3   r   r}   r   r0  r1  )rI   rT   r   r   r-   r*   r   rU   r   r  r  r}   r&  r   r'  r(  r   kwargsr   r  prediction_scoreslm_lossr   s                          rL   r\   zXmodForCausalLM.forwardt  s'   ^ &1%<k$++B]B]I,,))%'"7#9+/!5#)  
$ "!* LL9(d((!  ;;11 	G ')GABK7F,3,?WJ'KVK0$#33!//))$55
 	
rM   )NNNNNNNNNNNNNNNN)r`   ra   rb   _tied_weights_keysr1   r  r  r   r   rB   r  r   r   r   r   r   r   r\   rd   re   s   @rL   r  r  W  s    34JK
$.  15/36:59371559=A>B-1EI$(,0/3&*15#[
E,,-[
 5++,[
 !!2!23	[

 !!1!12[
 u//0[
 E--.[
   1 12[
  ((9(9:[
 !)):): ;[
 ))*[
 "%e.?.?(@"AB[
 D>[
 $D>[
 'tn[
  d^![
" !.#[
& 
uU\\"$EE	F'[
 [
rM   r  c                        e Zd ZddgZ fdZd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     dee	j                     dee	j                     dee	j                     dee   dee   dee   deee	j                     ef   fd       Z xZS )XmodForMaskedLMr  r  c                     t         |   |       |j                  rt        j	                  d       t        |d      | _        t        |      | _        | j                          y )NzkIf you want to use `XmodForMaskedLM` make sure `config.is_decoder=False` for bi-directional self-attention.Fr  r  rH   s     rL   r1   zXmodForMaskedLM.__init__  sR     NN1
 !5A!&) 	rM   c                 .    | j                   j                  S r   r  rk  s    rL   r  z%XmodForMaskedLM.get_output_embeddings  r  rM   c                 &    || j                   _        y r   r  r  s     rL   r  z%XmodForMaskedLM.set_output_embeddings  r  rM   rT   r   r   r-   r*   r   rU   r   r  r  r   r'  r(  r   c                    ||n| j                   j                  }| j                  |||||||||	|||      }|d   }| j                  |      }d}|
Ft	               } ||j                  d| j                   j                        |
j                  d            }|s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )a  
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        N)r   r   r-   r*   r   rU   r   r  r   r'  r(  r   r+   rm   r  r  r   r0  )
rJ   rv  rG  r  r   r   r3   r   r   r0  )rI   rT   r   r   r-   r*   r   rU   r   r  r  r   r'  r(  r   r  r  masked_lm_lossloss_fctr   s                       rL   r\   zXmodForMaskedLM.forward  s   4 &1%<k$++B]B],,))%'"7#9/!5#  
 "!* LL9')H%&7&<&<RAWAW&XZ`ZeZefhZijN')GABK7F3A3M^%.YSYY$!//))	
 	
rM   )NNNNNNNNNNNNN)r`   ra   rb   r  r1   r  r  r   r   rB   r  r   r   r   r   r   r   r\   rd   re   s   @rL   r  r    sr   24JK $.  15/36:59371559=A>B-1,0/3&*:
E,,-:
 5++,:
 !!2!23	:

 !!1!12:
 u//0:
 E--.:
   1 12:
  ((9(9::
 !)):): ;:
 ))*:
 $D>:
 'tn:
 d^:
 
uU\\"N2	3:
 :
rM   r  c                   .     e Zd ZdZ fdZd Zd Z xZS )rR  z*Roberta Head for masked language modeling.c                    t         |           t        j                  |j                  |j                        | _        t        j                  |j                  |j                        | _        t        j                  |j                  |j                        | _
        t        j                  t        j                  |j                              | _        | j                  | j                  _        y r   )r0   r1   r   rs   r4   r   r;   r<   
layer_normr3   r  	ParameterrB   rE   rO  rH   s     rL   r1   zXmodLMHead.__init__1  s    YYv1163E3EF
,,v'9'9v?T?TUyy!3!3V5F5FGLLV->->!?@	 IIrM   c                     | j                  |      }t        |      }| j                  |      }| j                  |      }|S r   )r   r   r  r  rI   featuresr  xs       rL   r\   zXmodLMHead.forward:  s;    JJx GOOA LLOrM   c                     | j                   j                  j                  j                  dk(  r| j                  | j                   _        y | j                   j                  | _        y )Nmeta)r  rO  rP   typerk  s    rL   _tie_weightszXmodLMHead._tie_weightsD  sC     <<##((F2 $		DLL))DIrM   )r`   ra   rb   rc   r1   r\   r  rd   re   s   @rL   rR  rR  .  s    4&*rM   rR  z
    X-MOD Model transformer with a sequence classification/regression head on top (a linear layer on top of the pooled
    output) e.g. for GLUE tasks.
    c                   ~    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	eej                     d
ee	   dee	   dee	   de
eej                     ef   fd       Z xZS )XmodForSequenceClassificationc                     t         |   |       |j                  | _        || _        t	        |d      | _        t        |      | _        | j                          y NFr  )	r0   r1   
num_labelsrJ   rd  rG  XmodClassificationHead
classifierrg  rH   s     rL   r1   z&XmodForSequenceClassification.__init__U  sJ      ++ 5A08 	rM   rT   r   r   r-   r*   r   rU   r  r   r'  r(  r   c                     ||n| j                   j                  }| j                  ||||||||	|
|
      }|d   }| j                  |      }d}|| j                   j                  | j
                  dk(  rd| j                   _        nl| j
                  dkD  rL|j                  t        j                  k(  s|j                  t        j                  k(  rd| j                   _        nd| j                   _        | j                   j                  dk(  rIt               }| j
                  dk(  r& ||j                         |j                               }n |||      }n| j                   j                  dk(  r=t               } ||j                  d| j
                        |j                  d            }n,| j                   j                  dk(  rt               } |||      }|s|f|d	d z   }||f|z   S |S t        |||j                   |j"                  
      S )a  
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        N	r   r   r-   r*   r   rU   r   r'  r(  r   r    
regressionsingle_label_classificationmulti_label_classificationr+   rm   r  )rJ   rv  rG  r  problem_typer  r/   rB   rG   rp   r   squeezer   r   r   r   r   r0  rI   rT   r   r   r-   r*   r   rU   r  r   r'  r(  r   r  r  r  r  r   s                     rL   r\   z%XmodForSequenceClassification.forward`  s   0 &1%<k$++B]B],,))%'/!5#  
 "!*1{{''/??a'/;DKK,__q(fllejj.HFLL\a\e\eLe/LDKK,/KDKK,{{''<7"9??a'#FNN$4fnn6FGD#FF3D))-JJ+-B @&++b/R))-II,./Y,F)-)9TGf$EvE'!//))	
 	
rM   NNNNNNNNNNN)r`   ra   rb   r1   r   r   rB   r  r   r   r   r   r   r   r\   rd   re   s   @rL   r  r  M  s;   	  15/36:59371559-1,0/3&*H
E,,-H
 5++,H
 !!2!23	H

 !!1!12H
 u//0H
 E--.H
   1 12H
 ))*H
 $D>H
 'tnH
 d^H
 
uU\\"$<<	=H
 H
rM   r  c                   ~    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	eej                     d
ee	   dee	   dee	   de
eej                     ef   fd       Z xZS )XmodForMultipleChoicec                     t         |   |       t        |      | _        t	        j
                  |j                        | _        t	        j                  |j                  d      | _
        | j                          y )Nr    )r0   r1   rd  rG  r   r=   r>   r?   rs   r4   r  rg  rH   s     rL   r1   zXmodForMultipleChoice.__init__  sV      (zz&"<"<=))F$6$6: 	rM   rT   r   r-   r   r  r*   r   rU   r   r'  r(  r   c                    ||n| j                   j                  }||j                  d   n|j                  d   }|!|j                  d|j	                  d            nd}|2|j                  |j	                  d      |j	                  d      z        nd}|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|1|j                  d|j	                  d      |j	                  d            nd}| j                  ||||||||	|
|
      }|d   }| j                  |      }| j                  |      }|j                  d|      }d}|t               } |||      }|s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )	a|  
        input_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        lang_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        token_type_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the multiple choice classification loss. Indices should be in `[0, ...,
            num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. (See
            `input_ids` above)
        position_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        inputs_embeds (`torch.FloatTensor` of shape `(batch_size, num_choices, sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
            model's internal embedding lookup matrix.
        Nr    r+   r   r   )	r   r*   r-   r   r   rU   r   r'  r(  rm   r  )rJ   rv  r   r   rF   repeatrG  r?   r  r   r   r   r0  )rI   rT   r   r-   r   r  r*   r   rU   r   r'  r(  num_choicesflat_input_idsflat_lang_idsflat_position_idsflat_token_type_idsflat_attention_maskflat_inputs_embedsr   rD  r  reshaped_logitsr  r  r   s                             rL   r\   zXmodForMultipleChoice.forward  s   ` &1%<k$++B]B],5,Aiooa(}GZGZ[\G]CLCXINN2,>?^bRZRf	q(9INN1<M(MNlpLXLdL--b,2C2CB2GHjnR`Rln11"n6I6I"6MNrvR`Rln11"n6I6I"6MNrv ( r=#5#5b#9=;M;Mb;QR 	 ,,"*..,/!5#  
  
]3/ ++b+6')HOV4D%''!"+5F)-)9TGf$EvE("!//))	
 	
rM   r  )r`   ra   rb   r1   r   r   rB   r  r   r   r   r   r   r   r\   rd   re   s   @rL   r  r    s;     15/3596:-1371559,0/3&*]
E,,-]
 5++,]
 !!1!12	]

 !!2!23]
 ))*]
 u//0]
 E--.]
   1 12]
 $D>]
 'tn]
 d^]
 
uU\\"$==	>]
 ]
rM   r  c                   ~    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	eej                     d
ee	   dee	   dee	   de
eej                     ef   fd       Z xZS )XmodForTokenClassificationc                 d   t         |   |       |j                  | _        t        |d      | _        |j
                  |j
                  n|j                  }t        j                  |      | _	        t        j                  |j                  |j                        | _        | j                          y r  )r0   r1   r  rd  rG  classifier_dropoutr>   r   r=   r?   rs   r4   r  rg  rI   rJ   r  rK   s      rL   r1   z#XmodForTokenClassification.__init__  s      ++ 5A)/)B)B)NF%%TZTnTn 	 zz"45))F$6$68I8IJ 	rM   rT   r   r   r-   r*   r   rU   r  r   r'  r(  r   c                    ||n| j                   j                  }| j                  ||||||||	|
|
      }|d   }| j                  |      }| j	                  |      }d}|<t               } ||j                  d| j                        |j                  d            }|s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )a  
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
        Nr  r   r+   rm   r  )rJ   rv  rG  r?   r  r   r   r  r   r   r0  r  s                     rL   r\   z"XmodForTokenClassification.forward+  s    , &1%<k$++B]B],,))%'/!5#  
 "!*,,71')HFKKDOO<fkk"oNDY,F)-)9TGf$EvE$!//))	
 	
rM   r  )r`   ra   rb   r1   r   r   rB   r  r   r   r   r   r   r   r\   rd   re   s   @rL   r  r    s-     15/36:59371559-1,0/3&*7
E,,-7
 5++,7
 !!2!23	7

 !!1!127
 u//07
 E--.7
   1 127
 ))*7
 $D>7
 'tn7
 d^7
 
uU\\"$99	:7
 7
rM   r  c                   (     e Zd ZdZ fdZd Z xZS )r  z-Head for sentence-level classification tasks.c                 Z   t         |           t        j                  |j                  |j                        | _        |j                  |j                  n|j                  }t        j                  |      | _	        t        j                  |j                  |j                        | _        y r   )r0   r1   r   rs   r4   r   r  r>   r=   r?   r  out_projr  s      rL   r1   zXmodClassificationHead.__init__j  s    YYv1163E3EF
)/)B)B)NF%%TZTnTn 	 zz"45		&"4"4f6G6GHrM   c                     |d d dd d f   }| j                  |      }| j                  |      }t        j                  |      }| j                  |      }| j	                  |      }|S rB  )r?   r   rB   tanhr  r  s       rL   r\   zXmodClassificationHead.forwards  sY    Q1WLLOJJqMJJqMLLOMM!rM   )r`   ra   rb   rc   r1   r\   rd   re   s   @rL   r  r  g  s    7IrM   r  c                       e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	eej                     d
eej                     dee	   dee	   dee	   de
eej                     ef   fd       Z xZS )XmodForQuestionAnsweringc                     t         |   |       |j                  | _        t        |d      | _        t        j                  |j                  |j                        | _        | j                          y r  )
r0   r1   r  rd  rG  r   rs   r4   
qa_outputsrg  rH   s     rL   r1   z!XmodForQuestionAnswering.__init__  sU      ++ 5A))F$6$68I8IJ 	rM   rT   r   r   r-   r*   r   rU   start_positionsend_positionsr   r'  r(  r   c                 *   ||n| j                   j                  }| j                  ||||||||
||
      }|d   }| j                  |      }|j	                  dd      \  }}|j                  d      j                         }|j                  d      j                         }d}||	t        |j                               dkD  r|j                  d      }t        |	j                               dkD  r|	j                  d      }	|j                  d      }|j                  d|      }|	j                  d|      }	t        |      } |||      } |||	      }||z   dz  }|s||f|dd z   }||f|z   S |S t        ||||j                  |j                  	      S )
rt  Nr  r   r    r+   r   )ignore_indexrm   )r  start_logits
end_logitsr   r0  )rJ   rv  rG  r  r   r  r   r   rF   clampr   r   r   r0  )rI   rT   r   r   r-   r*   r   rU   r  r  r   r'  r(  r   r  r  r  r  
total_lossignored_indexr  
start_lossend_lossr   s                           rL   r\   z XmodForQuestionAnswering.forward  s   * &1%<k$++B]B],,))%'/!5#  
 "!*1#)<<r<#: j#++B/::<''+668

&=+D?'')*Q."1"9"9""==%%'(1, - 5 5b 9(--a0M-33A}EO)//=AM']CH!,@J
M:H$x/14J"J/'!"+=F/9/EZMF*Q6Q+%!!//))
 	
rM   )NNNNNNNNNNNN)r`   ra   rb   r1   r   r   rB   r  r   r   r   r   r   r   r\   rd   re   s   @rL   r  r  }  sT     15/36:593715596:48,0/3&*E
E,,-E
 5++,E
 !!2!23	E

 !!1!12E
 u//0E
 E--.E
   1 12E
 "%"2"23E
   0 01E
 $D>E
 'tnE
 d^E
 
uU\\"$@@	AE
 E
rM   r  c                     | j                  |      j                         }t        j                  |d      j	                  |      |z   |z  }|j                         |z   S )a  
    Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding symbols
    are ignored. This is modified from fairseq's `utils.make_positions`.

    Args:
        x: torch.Tensor x:

    Returns: torch.Tensor
    r    r   )nerp   rB   cumsumtype_asrG   )rT   r%   rV   maskincremental_indicess        rL   rQ   rQ     sW     <<$((*D <<!4<<TBE[[_cc##%33rM   )r  r  r  r  r  r  rd  rF  )r   )Grc   r   typingr   r   rB   torch.utils.checkpointr   torch.nnr   r   r   activationsr
   r   cache_utilsr   r   r   
generationr   modeling_layersr   modeling_outputsr   r   r   r   r   r   r   r   modeling_utilsr   pytorch_utilsr   r   r   utilsr   r   utils.deprecationr   configuration_xmodr!   
get_loggerr`   r3  Moduler#   rg   r   r   r   r   r   r  r  r=  rF  rd  r  r  rR  r  r  r  r  r  rQ   __all__r*  rM   rL   <module>r     sX     "    A A ' C C ) 9	 	 	 . l l , 0 * 
		H	%V=RYY V=tA.		 A.HRYY 6BII 6tryy ")) $/ /dJ3* J3Z^
")) ^
D  30/ 30 30l e
# e
e
P 
t
)? t

t
n V
) V
 V
t* *> V
$7 V
V
r j
/ j
 j
Z H
!4 H
 H
XRYY , R
2 R
 R
l4 	rM   