
    hI                     B   d Z ddlZddlZddlmZmZ ddlZddlZddlmZ ddl	m
Z
mZmZ ddlmZ ddlmZmZmZ dd	lmZ dd
lmZ ddlmZmZmZmZmZmZmZmZ ddl m!Z! ddl"m#Z#m$Z$m%Z% ddl&m'Z'm(Z( ddl)m*Z* ddl+m,Z,  e(jZ                  e.      Z/d Z0 G d dejb                        Z2 G d dejb                        Z3 G d dejb                        Z4 G d dejb                        Z5 G d dejb                        Z6 G d dejb                        Z7 G d d ejb                        Z8 G d! d"e      Z9 G d# d$ejb                        Z: G d% d&ejb                        Z; G d' d(ejb                        Z< G d) d*ejb                        Z=e' G d+ d,e!             Z> e'd-.       G d/ d0e>             Z?e' G d1 d2e>             Z@ e'd3.       G d4 d5e>e             ZA e'd6.       G d7 d8e>             ZBe' G d9 d:e>             ZCe' G d; d<e>             ZDe' G d= d>e>             ZEg d?ZFy)@zPyTorch RemBERT model.    N)OptionalUnion)nn)BCEWithLogitsLossCrossEntropyLossMSELoss   )ACT2FN)CacheDynamicCacheEncoderDecoderCache)GenerationMixin)GradientCheckpointingLayer))BaseModelOutputWithPastAndCrossAttentions,BaseModelOutputWithPoolingAndCrossAttentions!CausalLMOutputWithCrossAttentionsMaskedLMOutputMultipleChoiceModelOutputQuestionAnsweringModelOutputSequenceClassifierOutputTokenClassifierOutput)PreTrainedModel)apply_chunking_to_forward find_pruneable_heads_and_indicesprune_linear_layer)auto_docstringlogging)deprecate_kwarg   )RemBertConfigc           
         	 ddl }ddl}ddl}t        j                  j                  |      }t        j                  d|        |j                  j                  |      }g }g }	|D ]s  \  }
t        fddD              rt        j                  d d|
        |j                  j                  |      }|j                         |	j                  |       u t        ||	      D ]  \  }j!                  d	d
      j#                  d      t        d D              r(t        j                  ddj%                                d| }D ]  }|j'                  d|      r|j#                  d|      }n|g}|d   dk(  s|d   dk(  rt)        |d      }nW|d   dk(  s|d   dk(  rt)        |d      }n:|d   dk(  rt)        |d      }n%|d   dk(  rt)        |d      }n	 t)        ||d         }t/        |      dk\  st1        |d         }||   } dd dk(  rt)        |d      }n|dk(  r|j3                  |      }	 |j4                  |j4                  k7  r&t7        d|j4                   d|j4                   d       	 t        j                  d!        t=        j>                  |      |_          | S # t        $ r t        j                  d        w xY w# t*        $ r7 t        j                  dj-                  dj%                                     Y w xY w# t8        $ r1}|xj:                  |j4                  |j4                  fz  c_         d}~ww xY w)"z'Load tf checkpoints in a pytorch model.r   NzLoading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see https://www.tensorflow.org/install/ for installation instructions.z&Converting TensorFlow checkpoint from c              3   &   K   | ]  }|v  
 y wN ).0denynames     j/var/www/html/eduruby.in/venv/lib/python3.12/site-packages/transformers/models/rembert/modeling_rembert.py	<genexpr>z-load_tf_weights_in_rembert.<locals>.<genexpr>H   s     Xtt|Xs   )adam_vadam_moutput_embeddingclszLoading TF weight z with shape zbert/zrembert//c              3   $   K   | ]  }|d v  
 yw))r*   r+   AdamWeightDecayOptimizerAdamWeightDecayOptimizer_1global_stepNr$   )r%   ns     r(   r)   z-load_tf_weights_in_rembert.<locals>.<genexpr>Y   s      
 nn
   z	Skipping z[A-Za-z]+_\d+z_(\d+)kernelgammaweightoutput_biasbetabiasoutput_weightssquad
classifierzSkipping {}   r   i_embeddingszPointer shape z and array shape z mismatchedzInitialize PyTorch weight )!renumpy
tensorflowImportErrorloggererrorospathabspathinfotrainlist_variablesanyload_variableappendzipreplacesplitjoin	fullmatchgetattrAttributeErrorformatlenint	transposeshape
ValueErrorAssertionErrorargstorch
from_numpydata)modelconfigtf_checkpoint_pathr@   nptftf_path	init_varsnamesarraysrZ   arraypointerm_namescope_namesnumer'   s                    @r(   load_tf_weights_in_rembertrp   2   sV   
 ggoo01G
KK8	BC''0IEF  	e X(WXX(l5'BC&&w5Te	 5&) 1/e||GZ0 zz#  

 
 KK)CHHTN#345 	'F||,f5 hhy&9%h1~)[^w-F!'84Q=0KNf4L!'62Q#33!'84Q7*!'<8%g{1~>G ;1$+a.)!#,+	', #$<=(gx0GxLL'E	}}+ >'--@QRWR]R]Q^^i!jkk ,
 	078''.c1/d LS  Q	
 	n & KK 4 4SXXd^ DE  	FFw}}ekk22F	s5   J7 !K ?L7 K<LL	M&,MMc                        e Zd ZdZ fdZ	 	 	 	 	 d
deej                     deej                     deej                     deej                     de	dej                  fd	Z xZS )RemBertEmbeddingszGConstruct the embeddings from word, position and token_type embeddings.c                 |   t         |           t        j                  |j                  |j
                  |j                        | _        t        j                  |j                  |j
                        | _	        t        j                  |j                  |j
                        | _        t        j                  |j
                  |j                        | _        t        j                  |j                        | _        | j#                  dt%        j&                  |j                        j)                  d      d       y )N)padding_idxepsposition_ids)r   F)
persistent)super__init__r   	Embedding
vocab_sizeinput_embedding_sizepad_token_idword_embeddingsmax_position_embeddingsposition_embeddingstype_vocab_sizetoken_type_embeddings	LayerNormlayer_norm_epsDropouthidden_dropout_probdropoutregister_bufferr^   arangeexpandselfrb   	__class__s     r(   r{   zRemBertEmbeddings.__init__   s    !||v::H[H[ 
 $&<<0N0NPVPkPk#l %'\\&2H2H&JeJe%f" f&A&AvG\G\]zz&"<"<= 	ELL)G)GHOOPWXej 	 	
    	input_idstoken_type_idsrw   inputs_embedspast_key_values_lengthreturnc                    ||j                         }n|j                         d d }|d   }|| j                  d d |||z   f   }|:t        j                  |t        j                  | j                  j
                        }|| j                  |      }| j                  |      }||z   }	| j                  |      }
|	|
z  }	| j                  |	      }	| j                  |	      }	|	S )Nrx   r   dtypedevice)sizerw   r^   zeroslongr   r   r   r   r   r   )r   r   r   rw   r   r   input_shape
seq_lengthr   
embeddingsr   s              r(   forwardzRemBertEmbeddings.forward   s      #..*K',,.s3K ^
,,Q0FVlIl0l-lmL!"[[EJJtO`O`OgOghN  00;M $ : :> J"%::
"66|D))
^^J/
\\*-
r   )NNNNr   )__name__
__module____qualname____doc__r{   r   r^   
LongTensorFloatTensorrX   Tensorr   __classcell__r   s   @r(   rr   rr      s    Q
( 15593759&'E,,- !!1!12 u//0	
   1 12 !$ 
r   rr   c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )RemBertPoolerc                     t         |           t        j                  |j                  |j                        | _        t        j                         | _        y r#   )rz   r{   r   Linearhidden_sizedenseTanh
activationr   s     r(   r{   zRemBertPooler.__init__   s9    YYv1163E3EF
'')r   hidden_statesr   c                 \    |d d df   }| j                  |      }| j                  |      }|S )Nr   )r   r   )r   r   first_token_tensorpooled_outputs       r(   r   zRemBertPooler.forward   s6     +1a40

#566r   r   r   r   r{   r^   r   r   r   r   s   @r(   r   r      s#    $
U\\ ell r   r   c                        e Zd Zd fd	Z eddd      	 	 	 	 	 	 ddej                  deej                     deej                     d	eej                     dee	   d
e
deej                     defd       Z xZS )RemBertSelfAttentionc                    t         |           |j                  |j                  z  dk7  r2t	        |d      s&t        d|j                   d|j                   d      |j                  | _        t        |j                  |j                  z        | _        | j                  | j                  z  | _        t        j                  |j                  | j                        | _        t        j                  |j                  | j                        | _        t        j                  |j                  | j                        | _        t        j                  |j                        | _        |j"                  | _        || _        y )Nr   embedding_sizezThe hidden size (z6) is not a multiple of the number of attention heads ())rz   r{   r   num_attention_headshasattrr[   rX   attention_head_sizeall_head_sizer   r   querykeyvaluer   attention_probs_dropout_probr   
is_decoder	layer_idxr   rb   r   r   s      r(   r{   zRemBertSelfAttention.__init__   s0    : ::a?PVXhHi#F$6$6#7 8 445Q8 
 $*#=#= #&v'9'9F<V<V'V#W !558P8PPYYv1143E3EF
99V//1C1CDYYv1143E3EF
zz&"E"EF ++"r   past_key_valuepast_key_valuesz4.58)new_nameversionr   attention_mask	head_maskencoder_hidden_statesoutput_attentionscache_positionr   c                    |j                   \  }}	}
| j                  |      j                  |d| j                  | j                        j                  dd      }|d u}|St        |t              rA|j                  j                  | j                        }|r|j                  }n|j                  }n|}|r|n|}|rK|IrGj                  | j                     j                  }|j                  | j                     j                  }n| j!                  |      j                  |d| j                  | j                        j                  dd      }| j#                  |      j                  |d| j                  | j                        j                  dd      }|D|s|nd }j%                  ||| j                  d|i      \  }}|rd|j                  | j                  <   t'        j(                  ||j                  dd            }|t+        j,                  | j                        z  }|||z   }t.        j0                  j3                  |d      }| j5                  |      }|||z  }t'        j(                  ||      }|j7                  dddd	      j9                         }|j;                         d d | j<                  fz   } |j                  | }||fS )
Nrx   r   r>   r   Tdimr   r	   )rZ   r   viewr   r   rY   
isinstancer   
is_updatedgetr   cross_attention_cacheself_attention_cachelayerskeysvaluesr   r   updater^   matmulmathsqrtr   
functionalsoftmaxr   permute
contiguousr   r   )r   r   r   r   r   r   r   r   
batch_sizer   _query_layeris_cross_attentionr   curr_past_key_valuecurrent_states	key_layervalue_layerattention_scoresattention_probscontext_layernew_context_layer_shapes                         r(   r   zRemBertSelfAttention.forward   s    %2$7$7!
JJJ}%T*b$":":D<T<TUYq!_ 	 3$>&/+>?,77;;DNNK
%*9*O*O'*9*N*N'&5#2D.-/"=*+224>>BGGI-44T^^DKKK (j"d&>&>@X@XY1a  

>*j"d&>&>@X@XY1a  *7It)<)C)C{DNN=M~<^*&	; &AEO..t~~> !<<Y5H5HR5PQ+dii8P8P.QQ%/.@ --//0@b/I ,,7  -	9O_kB%--aAq9DDF"/"4"4"6s";t?Q?Q>S"S***,CDo--r   r#   NNNNFN)r   r   r   r{   r   r^   r   r   r   r   booltupler   r   r   s   @r(   r   r      s    #* %0A6R 7;15=A+/"'15Q.||Q. !!2!23Q. E--.	Q.
  ((9(9:Q. "%Q.  Q. !.Q. 
Q. SQ.r   r   c                   n     e Zd Z fdZdej
                  dej
                  dej
                  fdZ xZS )RemBertSelfOutputc                 (   t         |           t        j                  |j                  |j                        | _        t        j                  |j                  |j                        | _        t        j                  |j                        | _
        y Nru   )rz   r{   r   r   r   r   r   r   r   r   r   r   s     r(   r{   zRemBertSelfOutput.__init__8  s`    YYv1163E3EF
f&8&8f>S>STzz&"<"<=r   r   input_tensorr   c                 r    | j                  |      }| j                  |      }| j                  ||z         }|S r#   r   r   r   r   r   r   s      r(   r   zRemBertSelfOutput.forward>  7    

=1]3}|'CDr   r   r   s   @r(   r   r   7  1    >U\\  RWR^R^ r   r   c                        e Zd Zd fd	Zd Z	 	 	 	 	 	 ddej                  deej                     deej                     deej                     dee	   dee
   d	eej                     d
eej                     fdZ xZS )RemBertAttentionc                     t         |           t        ||      | _        t	        |      | _        t               | _        y )Nr   )rz   r{   r   r   r   outputsetpruned_headsr   s      r(   r{   zRemBertAttention.__init__F  s2    (9E	'/Er   c                 >   t        |      dk(  ry t        || j                  j                  | j                  j                  | j
                        \  }}t        | j                  j                  |      | j                  _        t        | j                  j                  |      | j                  _        t        | j                  j                  |      | j                  _	        t        | j                  j                  |d      | j                  _        | j                  j                  t        |      z
  | j                  _        | j                  j                  | j                  j                  z  | j                  _        | j
                  j                  |      | _        y )Nr   r   r   )rW   r   r   r   r   r  r   r   r   r   r  r   r   union)r   headsindexs      r(   prune_headszRemBertAttention.prune_headsM  s   u:?749900$))2O2OQUQbQb
u
 -TYY__eD		*499==%@		,TYY__eD		.t{{/@/@%QO )-		(E(EE
(R		%"&))"?"?$))B_B_"_		 --33E:r   r   r   r   r   r   r   r   r   c           	      r    | j                  |||||||      }| j                  |d   |      }	|	f|dd  z   }
|
S )Nr   r   r   r   r   r   r   r   )r   r  )r   r   r   r   r   r   r   r   self_outputsattention_outputoutputss              r(   r   zRemBertAttention.forward`  s\     yy)"7+/) ! 
  ;;|AF#%QR(88r   r#   r   )r   r   r   r{   r	  r^   r   r   r   r   r   r   r   r   r   s   @r(   r   r   E  s    ";, 7;15=A+/,115|| !!2!23 E--.	
  ((9(9: "% $D> !. 
u||	r   r   c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )RemBertIntermediatec                    t         |           t        j                  |j                  |j
                        | _        t        |j                  t              rt        |j                     | _        y |j                  | _        y r#   )rz   r{   r   r   r   intermediate_sizer   r   
hidden_actstrr
   intermediate_act_fnr   s     r(   r{   zRemBertIntermediate.__init__z  s]    YYv1163K3KL
f''-'-f.?.?'@D$'-'8'8D$r   r   r   c                 J    | j                  |      }| j                  |      }|S r#   )r   r  r   r   s     r(   r   zRemBertIntermediate.forward  s&    

=100?r   r   r   s   @r(   r  r  y  s#    9U\\ ell r   r  c                   n     e Zd Z fdZdej
                  dej
                  dej
                  fdZ xZS )RemBertOutputc                 (   t         |           t        j                  |j                  |j
                        | _        t        j                  |j
                  |j                        | _        t        j                  |j                        | _        y r   )rz   r{   r   r   r  r   r   r   r   r   r   r   r   s     r(   r{   zRemBertOutput.__init__  s`    YYv779K9KL
f&8&8f>S>STzz&"<"<=r   r   r   r   c                 r    | j                  |      }| j                  |      }| j                  ||z         }|S r#   r   r   s      r(   r   zRemBertOutput.forward  r   r   r   r   s   @r(   r  r    r   r   r  c                       e Zd Zd fd	Z	 	 	 	 	 	 	 ddej
                  deej                     deej                     deej                     deej                     dee   dee	   d	eej
                     d
e
ej
                     fdZd Z xZS )RemBertLayerc                 h   t         |           |j                  | _        d| _        t	        ||      | _        |j                  | _        |j                  | _        | j                  r,| j                  st        |  d      t	        ||      | _	        t        |      | _        t        |      | _        y )Nr   z> should be used as a decoder model if cross attention is addedr  )rz   r{   chunk_size_feed_forwardseq_len_dimr   	attentionr   add_cross_attentionr[   crossattentionr  intermediater  r  r   s      r(   r{   zRemBertLayer.__init__  s    '-'E'E$)&)< ++#)#=#= ##?? D6)g!hii"26Y"OD/7#F+r   r   r   r   r   encoder_attention_maskr   r   r   r   c	           	      H   | j                  ||||||      }	|	d   }
|	dd  }| j                  rB|@t        | d      st        d|  d      | j	                  |
||||||      }|d   }
||dd  z   }t        | j                  | j                  | j                  |
      }|f|z   }|S )N)r   r   r   r   r   r   r   r#  z'If `encoder_hidden_states` are passed, z` has to be instantiated with cross-attention layers by setting `config.add_cross_attention=True`r  )	r!  r   r   r[   r#  r   feed_forward_chunkr  r   )r   r   r   r   r   r%  r   r   r   self_attention_outputsr  r  cross_attention_outputslayer_outputs                 r(   r   zRemBertLayer.forward  s    "&)/+) "0 "
 2!4(,??4@4!12 =dV DD D 
 '+&9&9 5#&; /"3- ': '#  7q9 7 ;;G0##T%A%A4CSCSUe
  /G+r   c                 L    | j                  |      }| j                  ||      }|S r#   )r$  r  )r   r  intermediate_outputr*  s       r(   r'  zRemBertLayer.feed_forward_chunk  s,    "//0@A{{#68HIr   r#   )NNNNNFN)r   r   r   r{   r^   r   r   r   r   r   r   r   r'  r   r   s   @r(   r  r    s    ,$ 7;15=A>B+/,115.||. !!2!23. E--.	.
  ((9(9:. !)):): ;. "%. $D>. !.. 
u||	.br   r  c                   8    e Zd Z fdZ	 	 	 	 	 	 	 	 	 	 ddej
                  deej                     deej                     deej                     deej                     deeeej                           dee	   d	e	d
e	de	deej
                     de
eef   fdZ xZS )RemBertEncoderc           	      2   t         |           || _        t        j                  |j
                  |j                        | _        t        j                  t        |j                        D cg c]  }t        ||       c}      | _        d| _        y c c}w )Nr  F)rz   r{   rb   r   r   r~   r   embedding_hidden_mapping_in
ModuleListrangenum_hidden_layersr  layergradient_checkpointing)r   rb   ir   s      r(   r{   zRemBertEncoder.__init__  sq    +-99V5P5PRXRdRd+e(]]uU[UmUmOn#o!L1$E#op
&+# $ps   ,Br   r   r   r   r%  r   	use_cacher   output_hidden_statesreturn_dictr   r   c           
         | j                   r%| j                  r|rt        j                  d       d}|r6|4t	        t        | j                        t        | j                              }|r:t        |t              r*t        j                  d       t	        j                  |      }| j                  |      }|	rdnd }|rdnd }|r| j                  j                  rdnd }t        | j                        D ]U  \  }}|	r||fz   }|||   nd } ||||||||      }|d   }|s-||d   fz   }| j                  j                  sM||d   fz   }W |	r||fz   }|
st        d	 |||||fD              S t        |||||
      S )NzZ`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...F)rb   zPassing a tuple of `past_key_values` is deprecated and will be removed in Transformers v4.58.0. You should pass an instance of `EncoderDecoderCache` instead, e.g. `past_key_values=EncoderDecoderCache.from_legacy_cache(past_key_values)`.r$   r   r   r>   c              3   $   K   | ]  }|| 
 y wr#   r$   )r%   vs     r(   r)   z)RemBertEncoder.forward.<locals>.<genexpr>%  s      
 = 
r4   )last_hidden_stater   r   
attentionscross_attentions)r5  trainingrD   warning_oncer   r   rb   r   r   from_legacy_cacher0  r"  	enumerater4  r   )r   r   r   r   r   r%  r   r7  r   r8  r9  r   all_hidden_statesall_self_attentionsall_cross_attentionsr6  layer_modulelayer_head_masklayer_outputss                      r(   r   zRemBertEncoder.forward  s    &&4==##p "	01,dkk2RT`hlhshsTtuOOU;\
 2CCOTO88G"6BD$5b4%64;;;Z;Zr`d(4 	VOA|#$58H$H!.7.CilO(%&!M *!,M &9]1=M<O&O#;;22+?=QRCSBU+U()	V,   1]4D D 
 "#%'(
 
 
 9+++*1
 	
r   )
NNNNNNFFTN)r   r   r   r{   r^   r   r   r   r   r   r   r   r   r   r   s   @r(   r.  r.    s   , 7;15=A>BEI$("'%* 15O
||O
 !!2!23O
 E--.	O

  ((9(9:O
 !)):): ;O
 "%e.?.?(@"ABO
 D>O
  O
 #O
 O
 !.O
 
u??	@O
r   r.  c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )RemBertPredictionHeadTransformc                 h   t         |           t        j                  |j                  |j                        | _        t        |j                  t              rt        |j                     | _
        n|j                  | _
        t        j                  |j                  |j                        | _        y r   )rz   r{   r   r   r   r   r   r  r  r
   transform_act_fnr   r   r   s     r(   r{   z'RemBertPredictionHeadTransform.__init__;  s{    YYv1163E3EF
f''-$*6+<+<$=D!$*$5$5D!f&8&8f>S>STr   r   r   c                 l    | j                  |      }| j                  |      }| j                  |      }|S r#   )r   rM  r   r  s     r(   r   z&RemBertPredictionHeadTransform.forwardD  s4    

=1--m<}5r   r   r   s   @r(   rK  rK  :  s$    UU\\ ell r   rK  c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )RemBertLMPredictionHeadc                 n   t         |           t        j                  |j                  |j
                        | _        t        j                  |j
                  |j                        | _        t        |j                     | _        t        j                  |j
                  |j                        | _        y r   )rz   r{   r   r   r   output_embedding_sizer   r}   decoderr
   r  r   r   r   r   s     r(   r{   z RemBertLMPredictionHead.__init__L  sz    YYv1163O3OP
yy!=!=v?P?PQ !2!23f&B&BH]H]^r   r   r   c                     | j                  |      }| j                  |      }| j                  |      }| j                  |      }|S r#   )r   r   r   rS  r  s     r(   r   zRemBertLMPredictionHead.forwardS  s@    

=16}5]3r   r   r   s   @r(   rP  rP  K  s$    _U\\ ell r   rP  c                   V     e Zd Z fdZdej
                  dej
                  fdZ xZS )RemBertOnlyMLMHeadc                 B    t         |           t        |      | _        y r#   )rz   r{   rP  predictionsr   s     r(   r{   zRemBertOnlyMLMHead.__init__]  s    26:r   sequence_outputr   c                 (    | j                  |      }|S r#   )rX  )r   rY  prediction_scoress      r(   r   zRemBertOnlyMLMHead.forwarda  s     ,,_=  r   r   r   s   @r(   rV  rV  \  s#    ;!u|| ! !r   rV  c                   *    e Zd ZU eed<   eZdZdZd Z	y)RemBertPreTrainedModelrb   rembertTc                    t        |t        j                        rm|j                  j                  j                  d| j                  j                         |j                  %|j                  j                  j                          yyt        |t        j                        rz|j                  j                  j                  d| j                  j                         |j                  2|j                  j                  |j                     j                          yyt        |t        j                        rJ|j                  j                  j                          |j                  j                  j                  d       yy)zInitialize the weightsg        )meanstdNg      ?)r   r   r   r7   r`   normal_rb   initializer_ranger:   zero_r|   rt   r   fill_)r   modules     r(   _init_weightsz$RemBertPreTrainedModel._init_weightsm  s   fbii( MM&&CT[[5R5R&S{{&  &&( '-MM&&CT[[5R5R&S!!-""6#5#56<<> .-KK""$MM$$S) .r   N)
r   r   r   r    __annotations__rp   load_tf_weightsbase_model_prefixsupports_gradient_checkpointingrg  r$   r   r(   r]  r]  f  s    0O!&*#*r   r]  a
  
    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in [Attention is
    all you need](https://huggingface.co/papers/1706.03762) by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit,
    Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin.

    To behave as an decoder the model needs to be initialized with the `is_decoder` argument of the configuration set
    to `True`. To be used in a Seq2Seq model, the model needs to initialized with both `is_decoder` argument and
    `add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.
    )custom_introc            "           e Zd Zd fd	Zd Zd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     deeee	j                           dee   dee   dee   dee   dee	j                     deeef   fd       Z xZS )RemBertModelc                     t         |   |       || _        t        |      | _        t        |      | _        |rt        |      nd| _        | j                          y)zv
        add_pooling_layer (bool, *optional*, defaults to `True`):
            Whether to add a pooling layer
        N)
rz   r{   rb   rr   r   r.  encoderr   pooler	post_init)r   rb   add_pooling_layerr   s      r(   r{   zRemBertModel.__init__  sM    
 	 +F3%f-/@mF+d 	r   c                 .    | j                   j                  S r#   r   r   r   s    r(   get_input_embeddingsz!RemBertModel.get_input_embeddings  s    ...r   c                 &    || j                   _        y r#   ru  )r   r   s     r(   set_input_embeddingsz!RemBertModel.set_input_embeddings  s    */'r   c                     |j                         D ]7  \  }}| j                  j                  |   j                  j	                  |       9 y)z
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        N)itemsrp  r4  r!  r	  )r   heads_to_pruner4  r  s       r(   _prune_headszRemBertModel._prune_heads  sE    
 +002 	CLE5LLu%//;;EB	Cr   r   r   r   rw   r   r   r   r%  r   r7  r   r8  r9  r   r   c                 J   ||n| j                   j                  }||n| j                   j                  }||n| j                   j                  }| j                   j                  r|
|
n| j                   j
                  }
nd}
||t        d      |#| j                  ||       |j                         }n!||j                         d d }nt        d      |\  }}||j                  n|j                  }d}|	5t        |	t              s|	d   d   j                  d   n|	j                         }|t        j                  |||z   f|      }|&t        j                   |t        j"                  |      }| j%                  ||      }| j                   j                  rE|C|j                         \  }}}||f}|t        j                  ||      }| j'                  |      }nd }| j)                  || j                   j*                        }| j-                  |||||	      }| j/                  ||||||	|
||||
      }|d   }| j0                  | j1                  |      nd }|s
||f|dd  z   S t3        |||j4                  |j6                  |j8                  |j:                        S )NFzDYou cannot specify both input_ids and inputs_embeds at the same timerx   z5You have to specify either input_ids or inputs_embedsr   r   )r   r   )r   rw   r   r   r   )
r   r   r   r%  r   r7  r   r8  r9  r   r   )r=  pooler_outputr   r   r>  r?  )rb   r   r8  use_return_dictr   r7  r[   %warn_if_padding_and_no_attention_maskr   r   r   r   rZ   get_seq_lengthr^   onesr   r   get_extended_attention_maskinvert_attention_maskget_head_maskr3  r   rp  rq  r   r   r   r>  r?  )r   r   r   r   rw   r   r   r   r%  r   r7  r   r8  r9  r   r   r   r   r   r   extended_attention_maskencoder_batch_sizeencoder_sequence_lengthr   encoder_hidden_shapeencoder_extended_attention_maskembedding_outputencoder_outputsrY  r   s                                 r(   r   zRemBertModel.forward  s   $ 2C1N-TXT_T_TqTq$8$D $++JjJj 	 &1%<k$++B]B];;!!%.%:	@U@UII ]%>cdd"66y.Q#..*K&',,.s3KTUU!,
J%.%:!!@T@T!"& "/59  "1%++B/$335 # !"ZZ*jCY6Y)ZdjkN!"[[EJJvVN 150P0PQ_al0m ;;!!&;&G=R=W=W=Y: 7$68O#P %-).4HQW)X&.2.H.HI_.`+.2+ &&y$++2O2OP	??%)'#9 + 
 ,,2"7#B+/!5#) ' 
 *!,8<8OO4UY#]3oab6III;-'+;;)77&11,==
 	
r   )TNNNNNNNNNNNNNN)r   r   r   r{   rw  ry  r}  r   r   r^   r   r   r   r   r   r   r   r   r   r   s   @r(   rn  rn  ~  s    /0C  155959371559=A>BEI$(,0/3&*15m
E,,-m
 !!1!12m
 !!1!12	m

 u//0m
 E--.m
   1 12m
  ((9(9:m
 !)):): ;m
 "%e.?.?(@"ABm
 D>m
 $D>m
 'tnm
 d^m
 !.m
  
uBB	C!m
 m
r   rn  c                       e Zd ZdgZ fdZd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     dee	j                     dee   dee   dee   deeef   fd       ZddZedefd       Z xZS )RemBertForMaskedLMcls.predictions.decoder.weightc                     t         |   |       |j                  rt        j	                  d       t        |d      | _        t        |      | _        | j                          y )NznIf you want to use `RemBertForMaskedLM` make sure `config.is_decoder=False` for bi-directional self-attention.Frs  
rz   r{   r   rD   warningrn  r^  rV  r-   rr  r   s     r(   r{   zRemBertForMaskedLM.__init__  sR     NN1
 $FeD%f- 	r   c                 B    | j                   j                  j                  S r#   r-   rX  rS  rv  s    r(   get_output_embeddingsz(RemBertForMaskedLM.get_output_embeddings-      xx##+++r   c                 :    || j                   j                  _        y r#   r  r   new_embeddingss     r(   set_output_embeddingsz(RemBertForMaskedLM.set_output_embeddings0      '5$r   r   r   r   rw   r   r   r   r%  labelsr   r8  r9  r   c                    ||n| j                   j                  }| j                  |||||||||
||      }|d   }| j                  |      }d}|	Ft	               } ||j                  d| j                   j                        |	j                  d            }|s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )a  
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
        N)
r   r   rw   r   r   r   r%  r   r8  r9  r   rx   r>   losslogitsr   r>  )
rb   r  r^  r-   r   r   r}   r   r   r>  )r   r   r   r   rw   r   r   r   r%  r  r   r8  r9  r  rY  r[  masked_lm_lossloss_fctr  s                      r(   r   zRemBertForMaskedLM.forward3  s    , &1%<k$++B]B],,))%'"7#9/!5#  
 "!* HH_5')H%&7&<&<RAWAW&XZ`ZeZefhZijN')GABK7F3A3M^%.YSYY$!//))	
 	
r   c                    |j                   }|d   }| j                  j                  J d       t        j                  ||j                  |j                   d   df      gd      }t        j                  |df| j                  j                  t        j                  |j                        }t        j                  ||gd      }||dS )Nr   z.The PAD token should be defined for generationr   rx   r   r   )r   r   )	rZ   rb   r   r^   cat	new_zerosfullr   r   )r   r   r   model_kwargsr   effective_batch_sizedummy_tokens          r(   prepare_inputs_for_generationz0RemBertForMaskedLM.prepare_inputs_for_generationl  s    oo*1~ {{''3e5ee3NN4L4LnNbNbcdNeghMi4j#kqstjj!1%t{{'?'?uzzZcZjZj
 IIy+6A>	&.IIr   c                      y)z
        Legacy correction: RemBertForMaskedLM can't call `generate()` from `GenerationMixin`, even though it has a
        `prepare_inputs_for_generation` method.
        Fr$   )r-   s    r(   can_generatezRemBertForMaskedLM.can_generatez  s     r   )NNNNNNNNNNNNr#   )r   r   r   _tied_weights_keysr{   r  r  r   r   r^   r   r   r   r   r   r   r   r  classmethodr  r   r   s   @r(   r  r    sp   :;,6  155959371559=A>B-1,0/3&*6
E,,-6
 !!1!126
 !!1!12	6

 u//06
 E--.6
   1 126
  ((9(9:6
 !)):): ;6
 ))*6
 $D>6
 'tn6
 d^6
 
un$	%6
 6
pJ T  r   r  zS
    RemBERT Model with a `language modeling` head on top for CLM fine-tuning.
    c            "           e Zd ZdgZ fdZd Zd Ze	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddee	j                     dee	j                     dee	j                     dee	j                     d	ee	j                     d
ee	j                     dee	j                     dee	j                     deeee	j                           dee	j                     dee   dee   dee   dee   deeef   fd       Z xZS )RemBertForCausalLMr  c                     t         |   |       |j                  st        j	                  d       t        |d      | _        t        |      | _        | j                          y )NzOIf you want to use `RemBertForCausalLM` as a standalone, add `is_decoder=True.`Fr  r  r   s     r(   r{   zRemBertForCausalLM.__init__  sL       NNlm#FeD%f- 	r   c                 B    | j                   j                  j                  S r#   r  rv  s    r(   r  z(RemBertForCausalLM.get_output_embeddings  r  r   c                 :    || j                   j                  _        y r#   r  r  s     r(   r  z(RemBertForCausalLM.set_output_embeddings  r  r   r   r   r   rw   r   r   r   r%  r   r  r7  r   r8  r9  r   c                    ||n| j                   j                  }| j                  |||||||||	||||      }|d   }| j                  |      }d}|
* | j                  ||
fd| j                   j
                  i|}|s|f|dd z   }||f|z   S |S t        |||j                  |j                  |j                  |j                        S )a  
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
            `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are
            ignored (masked), the loss is only computed for the tokens with labels n `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, RemBertForCausalLM, RemBertConfig
        >>> import torch

        >>> tokenizer = AutoTokenizer.from_pretrained("google/rembert")
        >>> config = RemBertConfig.from_pretrained("google/rembert")
        >>> config.is_decoder = True
        >>> model = RemBertForCausalLM.from_pretrained("google/rembert", config=config)

        >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> prediction_logits = outputs.logits
        ```N)r   r   rw   r   r   r   r%  r   r7  r   r8  r9  r   r}   r>   )r  r  r   r   r>  r?  )rb   r  r^  r-   loss_functionr}   r   r   r   r>  r?  )r   r   r   r   rw   r   r   r   r%  r   r  r7  r   r8  r9  kwargsr  rY  r[  lm_lossr  s                        r(   r   zRemBertForCausalLM.forward  s   R &1%<k$++B]B],,))%'"7#9+/!5#  
  "!* HH_5(d((!  ;;11 	G ')GABK7F,3,?WJ'KVK0$#33!//))$55
 	
r   r  )r   r   r   r  r{   r  r  r   r   r^   r   r   r   r   r   r   r   r   r   s   @r(   r  r    s    ;;
,6  155959371559=A>BEI-1$(,0/3&*Q
E,,-Q
 !!1!12Q
 !!1!12	Q

 u//0Q
 E--.Q
   1 12Q
  ((9(9:Q
 !)):): ;Q
 "%e.?.?(@"ABQ
 ))*Q
 D>Q
 $D>Q
 'tnQ
 d^Q
" 
u77	8#Q
 Q
r   r  z
    RemBERT Model transformer with a sequence classification/regression head on top (a linear layer on top of the
    pooled output) e.g. for GLUE tasks.
    c                   D    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	ee	   d
ee	   dee	   de
eef   fd       Z xZS ) RemBertForSequenceClassificationc                 ,   t         |   |       |j                  | _        t        |      | _        t        j                  |j                        | _        t        j                  |j                  |j                        | _        | j                          y r#   rz   r{   
num_labelsrn  r^  r   r   classifier_dropout_probr   r   r   r=   rr  r   s     r(   r{   z)RemBertForSequenceClassification.__init__  si      ++#F+zz&"@"@A))F$6$68I8IJ 	r   r   r   r   rw   r   r   r  r   r8  r9  r   c                 @   |
|
n| j                   j                  }
| j                  ||||||||	|
	      }|d   }| j                  |      }| j	                  |      }d}|| j                   j
                  | j                  dk(  rd| j                   _        nl| j                  dkD  rL|j                  t        j                  k(  s|j                  t        j                  k(  rd| j                   _        nd| j                   _        | j                   j
                  dk(  rIt               }| j                  dk(  r& ||j                         |j                               }n |||      }n| j                   j
                  dk(  r=t               } ||j                  d| j                        |j                  d            }n,| j                   j
                  dk(  rt               } |||      }|
s|f|dd z   }||f|z   S |S t!        |||j"                  |j$                  	      S )
a  
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        Nr   r   rw   r   r   r   r8  r9  r   
regressionsingle_label_classificationmulti_label_classificationrx   r>   r  )rb   r  r^  r   r=   problem_typer  r   r^   r   rX   r   squeezer   r   r   r   r   r>  )r   r   r   r   rw   r   r   r  r   r8  r9  r  r   r  r  r  r  s                    r(   r   z(RemBertForSequenceClassification.forward  s   ( &1%<k$++B]B],,))%'/!5#  

  
]3/{{''/??a'/;DKK,__q(fllejj.HFLL\a\e\eLe/LDKK,/KDKK,{{''<7"9??a'#FNN$4fnn6FGD#FF3D))-JJ+-B @&++b/R))-II,./Y,F)-)9TGf$EvE'!//))	
 	
r   
NNNNNNNNNN)r   r   r   r{   r   r   r^   r   r   r   r   r   r   r   r   r   s   @r(   r  r    s     266:59481559-1,0/3&*E
E--.E
 !!2!23E
 !!1!12	E

 u001E
 E--.E
   1 12E
 ))*E
 $D>E
 'tnE
 d^E
 
u..	/E
 E
r   r  c                   D    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	ee	   d
ee	   dee	   de
eef   fd       Z xZS )RemBertForMultipleChoicec                     t         |   |       t        |      | _        t	        j
                  |j                        | _        t	        j                  |j                  d      | _
        | j                          y )Nr   )rz   r{   rn  r^  r   r   r  r   r   r   r=   rr  r   s     r(   r{   z!RemBertForMultipleChoice.__init__N  sV     #F+zz&"@"@A))F$6$6: 	r   r   r   r   rw   r   r   r  r   r8  r9  r   c                 L   |
|
n| j                   j                  }
||j                  d   n|j                  d   }|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|!|j                  d|j	                  d            nd}|1|j                  d|j	                  d      |j	                  d            nd}| j                  ||||||||	|
	      }|d   }| j                  |      }| j                  |      }|j                  d|      }d}|t               } |||      }|
s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )a[  
        input_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`):
            Indices of input sequence tokens in the vocabulary.

            Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
            [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        token_type_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
            1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        position_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
            config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        inputs_embeds (`torch.FloatTensor` of shape `(batch_size, num_choices, sequence_length, hidden_size)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
            is useful if you want more control over how to convert *input_ids* indices into associated vectors than the
            model's internal embedding lookup matrix.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the multiple choice classification loss. Indices should be in `[0, ...,
            num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. (See
            `input_ids` above)
        Nr   rx   r   r  r>   r  )rb   r  rZ   r   r   r^  r   r=   r   r   r   r>  )r   r   r   r   rw   r   r   r  r   r8  r9  num_choicesr  r   r  reshaped_logitsr  r  r  s                      r(   r   z RemBertForMultipleChoice.forwardX  s   X &1%<k$++B]B],5,Aiooa(}GZGZ[\G]>G>SINN2y~~b'9:Y]	M[Mg,,R1D1DR1HImqM[Mg,,R1D1DR1HImqGSG_|((\->->r-BCei ( r=#5#5b#9=;M;Mb;QR 	 ,,))%'/!5#  

  
]3/ ++b+6')HOV4D%''!"+5F)-)9TGf$EvE("!//))	
 	
r   r  )r   r   r   r{   r   r   r^   r   r   r   r   r   r   r   r   r   s   @r(   r  r  L  s     266:59481559-1,0/3&*X
E--.X
 !!2!23X
 !!1!12	X

 u001X
 E--.X
   1 12X
 ))*X
 $D>X
 'tnX
 d^X
 
u//	0X
 X
r   r  c                   D    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	ee	   d
ee	   dee	   de
eef   fd       Z xZS )RemBertForTokenClassificationc                 0   t         |   |       |j                  | _        t        |d      | _        t        j                  |j                        | _        t        j                  |j                  |j                        | _        | j                          y NFr  r  r   s     r(   r{   z&RemBertForTokenClassification.__init__  sk      ++#FeDzz&"@"@A))F$6$68I8IJ 	r   r   r   r   rw   r   r   r  r   r8  r9  r   c                    |
|
n| j                   j                  }
| j                  ||||||||	|
	      }|d   }| j                  |      }| j	                  |      }d}|<t               } ||j                  d| j                        |j                  d            }|
s|f|dd z   }||f|z   S |S t        |||j                  |j                        S )z
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
        Nr  r   rx   r>   r  )rb   r  r^  r   r=   r   r   r  r   r   r>  )r   r   r   r   rw   r   r   r  r   r8  r9  r  rY  r  r  r  r  s                    r(   r   z%RemBertForTokenClassification.forward  s    $ &1%<k$++B]B],,))%'/!5#  

 "!*,,71')HFKKDOO<fkk"oNDY,F)-)9TGf$EvE$!//))	
 	
r   r  )r   r   r   r{   r   r   r^   r   r   r   r   r   r   r   r   r   s   @r(   r  r    s   	  266:59481559-1,0/3&*2
E--.2
 !!2!232
 !!1!12	2

 u0012
 E--.2
   1 122
 ))*2
 $D>2
 'tn2
 d^2
 
u++	,2
 2
r   r  c                   d    e Zd Z fdZe	 	 	 	 	 	 	 	 	 	 	 ddeej                     deej                     deej                     deej                     deej                     deej                     deej                     d	eej                     d
ee	   dee	   dee	   de
eef   fd       Z xZS )RemBertForQuestionAnsweringc                     t         |   |       |j                  | _        t        |d      | _        t        j                  |j                  |j                        | _        | j                          y r  )
rz   r{   r  rn  r^  r   r   r   
qa_outputsrr  r   s     r(   r{   z$RemBertForQuestionAnswering.__init__  sU      ++#FeD))F$6$68I8IJ 	r   r   r   r   rw   r   r   start_positionsend_positionsr   r8  r9  r   c                    ||n| j                   j                  }| j                  |||||||	|
|	      }|d   }| j                  |      }|j	                  dd      \  }}|j                  d      }|j                  d      }d }||t        |j                               dkD  r|j                  d      }t        |j                               dkD  r|j                  d      }|j                  d      }|j                  d|       |j                  d|       t        |      } |||      } |||      }||z   dz  }|s||f|dd  z   }||f|z   S |S t        ||||j                  |j                        S )	Nr  r   r   rx   r   )ignore_indexr>   )r  start_logits
end_logitsr   r>  )rb   r  r^  r  rQ   r  rW   r   clamp_r   r   r   r>  )r   r   r   r   rw   r   r   r  r  r   r8  r9  r  rY  r  r  r  
total_lossignored_indexr  
start_lossend_lossr  s                          r(   r   z#RemBertForQuestionAnswering.forward  s    &1%<k$++B]B],,))%'/!5#  

 "!*1#)<<r<#: j#++B/''+

&=+D?'')*Q."1"9"9""==%%'(1, - 5 5b 9(--a0M""1m4  M2']CH!,@J
M:H$x/14J"J/'!"+=F/9/EZMF*Q6Q+%!!//))
 	
r   )NNNNNNNNNNN)r   r   r   r{   r   r   r^   r   r   r   r   r   r   r   r   r   s   @r(   r  r    s$   	  266:594815596:48,0/3&*>
E--.>
 !!2!23>
 !!1!12	>

 u001>
 E--.>
   1 12>
 "%"2"23>
   0 01>
 $D>>
 'tn>
 d^>
 
u22	3>
 >
r   r  )
r  r  r  r  r  r  r  rn  r]  rp   )Gr   r   rF   typingr   r   r^   torch.utils.checkpointr   torch.nnr   r   r   activationsr
   cache_utilsr   r   r   
generationr   modeling_layersr   modeling_outputsr   r   r   r   r   r   r   r   modeling_utilsr   pytorch_utilsr   r   r   utilsr   r   utils.deprecationr   configuration_rembertr    
get_loggerr   rD   rp   Modulerr   r   r   r   r   r  r  r  r.  rK  rP  rV  r]  rn  r  r  r  r  r  r  __all__r$   r   r(   <module>r     sV     	 "    A A ! C C ) 9	 	 	 . l l , 0 0 
		H	%Pf3		 3nBII h.299 h.X		 0ryy 0h"))  BII D- DNX
RYY X
xRYY "bii "! ! *_ * *. 	M
) M
M
` e/ e eP 
g
/ g

g
T Q
'= Q
Q
h d
5 d
 d
N ?
$: ?
 ?
D K
"8 K
 K
\r   