
    h2                    0   d dl mZ d dlZd dlmZmZ d dlmZ d dlm	Z	m
Z
mZmZmZmZ d dlmZ d dlmZ d dlmZ d d	lmZ d d
lmZmZmZ d dlmZ dZ G d de      Z G d de      Z G d de      Z  G d de      Z! G d dee      Z" G d dee"      Z#y)    )annotationsN)ABCabstractmethod)Enum)AnyDictIteratorListMappingOptional)CallbackManagerForLLMRun)LLM)GenerationChunk)pre_init)	BaseModel
ConfigDictField)enforce_stop_tokenszocid1.generativeaiendpointc                  :    e Zd Zeedd              Zedd       Zy)Providerc                     y N selfs    h/var/www/html/eduruby.in/venv/lib/python3.12/site-packages/langchain_community/llms/oci_generative_ai.pystop_sequence_keyzProvider.stop_sequence_key   s    (+    c                     y r   r   r   responses     r   completion_response_to_textz$Provider.completion_response_to_text   s    ADr   Nreturnstrr!   r   r$   r%   )__name__
__module____qualname__propertyr   r   r"   r   r   r   r   r      s&    +  +D Dr   r   c                  ,    e Zd ZU dZded<   ddZddZy)	CohereProviderstop_sequencesr%   r   c                2    ddl m} |j                  | _        y Nr   models)oci.generative_ai_inferencer1   CohereLlmInferenceRequestllm_inference_requestr   r1   s     r   __init__zCohereProvider.__init__   s    6%+%E%E"r   c                \    |j                   j                  j                  d   j                  S Nr   )datainference_responsegenerated_textstextr    s     r   r"   z*CohereProvider.completion_response_to_text$   s#    }}//??BGGGr   Nr$   Noner&   r'   r(   r)   r   __annotations__r6   r"   r   r   r   r,   r,      s    -s-F
Hr   r,   c                  ,    e Zd ZU dZded<   ddZddZy)	MetaProviderstopr%   r   c                2    ddl m} |j                  | _        y r/   )r2   r1   LlamaLlmInferenceRequestr4   r5   s     r   r6   zMetaProvider.__init__+   s    6%+%D%D"r   c                \    |j                   j                  j                  d   j                  S r8   )r9   r:   choicesr<   r    s     r   r"   z(MetaProvider.completion_response_to_text0   s#    }}//77:???r   Nr=   r&   r?   r   r   r   rB   rB   (   s    #s#E
@r   rB   c                       e Zd ZdZdZdZdZdZy)OCIAuthTypez'OCI authentication types as enumerator.            N)r'   r(   r)   __doc__API_KEYSECURITY_TOKENINSTANCE_PRINCIPALRESOURCE_PRINCIPALr   r   r   rI   rI   4   s    1GNr   rI   c                     e Zd ZU dZ edd      Zded<   dZded	<   	 d
Zded<   	 dZ	ded<   	 dZ
ded<   	 dZded<   	 dZded<   	 dZded<   	 dZded<   	 dZded<   	  eddd      Zedd       Zedd       ZddZy) OCIGenAIBasezBase class for OCI GenAI modelsNT)defaultexcluder   clientrO   zOptional[str]	auth_typeDEFAULTauth_profilez~/.oci/configauth_file_locationmodel_idproviderzOptional[Dict]model_kwargsservice_endpointcompartment_idFbool	is_streamforbidr   )extraarbitrary_types_allowedprotected_namespacesc                   |d   |S 	 ddl i d|d   j                  j                  dd}|d   t        d      j                  k(  r9j
                  j                  |d	   |d
         |d<   |j                  dd       n|d   t        d      j                  k(  r@	 	 	 	 dfd}j
                  j                  |d	   |d
         |d<    ||d         |d<   n|d   t        d      j                  k(  r(j                  j                  j                         |d<   nU|d   t        d      j                  k(  r(j                  j                  j                         |d<   nt        d|d    d       j                  j                  di ||d<   |S # t        $ r}t!        d      |d}~wt"        $ r}t        d|      |d}~ww xY w)zBValidate that OCI config and python package exists in environment.rW   Nr   r_   )
      )configsignerr_   retry_strategytimeoutrX   rJ   r[   rZ   )file_locationprofile_namerj   rk   rK   c                :   j                   j                  | j                  d      d       }t        t	        | j                  d            d      5 }|j                         }d d d        j                  j                  j                  |      S # 1 sw Y   /xY w)Nkey_filesecurity_token_filezutf-8)encoding)	rk   load_private_key_from_filegetopenr%   readauthsignersSecurityTokenSigner)
oci_configpkf	st_stringocis       r   make_security_token_signerzEOCIGenAIBase.validate_environment.<locals>.make_security_token_signer   s     >>"z2DB JNN+@ABW -$%FFH	- 88++??	2NN	- -s   BB)r{   rL   rM   z)Please provide valid value to auth_type, z is not valid.zYCould not import oci python package. Please make sure you have the oci package installed.zCould not authenticate with OCI client.
                If INSTANCE_PRINCIPAL or RESOURCE_PRINCIPAL is used, 
                please check the specified
                auth_profile, auth_file_location and auth_type are valid.)r{   zdict[str, Any]r$   z&'oci.auth.signers.SecurityTokenSigner'r   )r   retryDEFAULT_RETRY_STRATEGYrI   namerj   	from_filepoprx   ry   %InstancePrincipalsSecurityTokenSignerget_resource_principals_signer
ValueErrorgenerative_ai_inferenceGenerativeAiInferenceClientImportErrorModuleNotFoundError	Exception)clsvaluesclient_kwargsr   exer   s         @r   validate_environmentz!OCIGenAIBase.validate_environmentp   s1   
 ('MD	 $*+=$>"%))"B"B$M k"k!n&9&99*-***>*>"()=">!'!7 +? +h' !!(D1$A(;(;;
O .
O;
O +.***>*>"()=">!'!7 +? +h' +E,X6+h' $A(;(;;HH$$JJL h' $A(;(;;HH$$CCE h' !?k*+>; 
  Ws::VV   F8$   	%G   	M  	s$   F	F 	GF**G6GGc                .    | j                   xs i }i d|iS )zGet the identifying parameters.r^   )r^   )r   _model_kwargss     r   _identifying_paramsz OCIGenAIBase._identifying_params   s+     ))/R
}-
 	
r   c                    | j                   | j                   }nC| j                  t        d      | j                  j                  d      d   j	                         }||vrt        d| j                   d      ||   S )Nzmodel_id is required to derive the provider, please provide the provider explicitly or specify the model_id to derive the provider..r   z(Invalid provider derived from model_id: zL Please explicitly pass in the supported provider when using custom endpoint)r]   r\   r   splitlower)r   provider_mapr]   s      r   _get_providerzOCIGenAIBase._get_provider   s    ==$}}H}}$ ; 
 }}**3/288:H<':4==/ J- - 
 H%%r   )r   r   r$   r   r$   Mapping[str, Any])r   r   r$   r   )r'   r(   r)   rN   r   rW   r@   rX   rZ   r[   r\   r]   r^   r_   r`   rb   r   model_configr   r   r*   r   r   r   r   r   rT   rT   =   s    )d3FC3(I}( #,L-+ )87 #Hm"7"Hm"
 $(L.'0&*m*$(NM(It12L M M^ 
 
&r   rT   c                      e Zd ZdZ edd      Zedd       Zedd       Zedd       Z		 	 	 	 	 	 	 	 ddZ
dd	Z	 	 d	 	 	 	 	 	 	 	 	 ddZ	 	 d	 	 	 	 	 	 	 	 	 ddZy
)OCIGenAIa  OCI large language models.

    To authenticate, the OCI client uses the methods described in
    https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm

    The authentifcation method is passed through auth_type and should be one of:
    API_KEY (default), SECURITY_TOKEN, INSTANCE_PRINCIPAL, RESOURCE_PRINCIPAL

    Make sure you have the required policies (profile/roles) to
    access the OCI Generative AI service.
    If a specific config profile is used, you must pass
    the name of the profile (from ~/.oci/config) through auth_profile.
    If a specific config file location is used, you must pass
    the file location where profile name configs present
    through auth_file_location

    To use, you must provide the compartment id
    along with the endpoint url, and model id
    as named parameters to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms import OCIGenAI

            llm = OCIGenAI(
                    model_id="MY_MODEL_ID",
                    service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
                    compartment_id="MY_OCID"
                )
    rc   T)rd   re   c                     y)zReturn type of llm.oci_generative_ai_completionr   r   s    r   	_llm_typezOCIGenAI._llm_type  s     .r   c                ,    t               t               dS )zGet the provider map)coheremeta)r,   rB   r   s    r   _provider_mapzOCIGenAI._provider_map  s     %& N
 	
r   c                :    | j                  | j                        S )z Get the internal provider object)r   )r   r   r   s    r   	_providerzOCIGenAI._provider  s     !!t/A/A!BBr   c           	        ddl m} | j                  xs i }|||| j                  j                  <   | j
                  t        d      | j
                  j                  t              r|j                  | j
                        }n|j                  | j
                        }i ||}||d<   | j                  |d<   |j                  | j                  | | j                  j                  d	i |      }|S )
Nr   r0   zDmodel_id is required to call the model, please provide the model_id.)endpoint_id)r\   promptrb   )r`   serving_modeinference_requestr   )r2   r1   r^   r   r   r\   r   
startswithCUSTOM_ENDPOINT_PREFIXDedicatedServingModeOnDemandServingModerb   GenerateTextDetailsr`   r4   )	r   r   rC   kwargsr1   r   r   inference_paramsinvocation_objs	            r   _prepare_invocation_objectz#OCIGenAI._prepare_invocation_object  s     	7))/R>BM$..::;== V  ==##$:;!664==6QL!55t}}5ML6m6v6%+"(,%33..%BdnnBBVEUV 4 
 r   c                X    | j                   j                  |      }|t        ||      }|S r   )r   r"   r   )r   r!   rC   r<   s       r   _process_responsezOCIGenAI._process_response4  s-    ~~99(C&tT2Dr   Nc                   | j                   r;d} | j                  |||fi |D ]  }||j                  z  } |t        ||      }|S | j	                  |||      }| j
                  j                  |      }| j                  ||      S )ak  Call out to OCIGenAI generate endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

               response = llm.invoke("Tell me a joke.")
         )rb   _streamr<   r   r   rW   generate_textr   )	r   r   rC   run_managerr   r<   chunkr   r!   s	            r   _callzOCIGenAI._call<  s    * >>D%fdKJ6J #

"#*46K88vN;;,,^<%%h55r   c              +  h  K   d| _         | j                  |||      }| j                  j                  |      }|j                  j                         D ]\  }t        j                  |j                        }d|v r|d   }	nd}	t        |	      }
|r|j                  |
j                  |
       |
 ^ yw)ac  Stream OCIGenAI LLM on given prompt.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            An iterator of GenerationChunks.

        Example:
            .. code-block:: python

            response = llm.stream("Tell me a joke.")
        Tr<   r   )r<   )r   N)rb   r   rW   r   r9   eventsjsonloadsr   on_llm_new_tokenr<   )r   r   rC   r   r   r   r!   event	json_loadevent_data_textr   s              r   r   zOCIGenAI._stream]  s     , 88vN;;,,^<]]))+ 		E

5::.I""+F"3"$#9E,,UZZu,EK		s   B0B2r#   r   )r$   r   )r   r%   rC   Optional[List[str]]r   Dict[str, Any]r$   r   )r!   r   rC   r   r$   r%   )NN)
r   r%   rC   r   r   "Optional[CallbackManagerForLLMRun]r   r   r$   r%   )
r   r%   rC   r   r   r   r   r   r$   zIterator[GenerationChunk])r'   r(   r)   rN   r   r   r*   r   r   r   r   r   r   r   r   r   r   r   r      s    @  $L
 . . 
 
 C C!4>L	> %):>	66 "6 8	6
 6 
6H %):>	## "# 8	#
 # 
##r   r   )$
__future__r   r   abcr   r   enumr   typingr   r   r	   r
   r   r   langchain_core.callbacksr   #langchain_core.language_models.llmsr   langchain_core.outputsr   langchain_core.utilsr   pydanticr   r   r   langchain_community.llms.utilsr   r   r   r,   rB   rI   rT   r   r   r   r   <module>r      s    "  #  ? ? = 3 2 ) 1 1 >5 Es E	HX 	H	@8 	@$ ]&9c ]&@csL cr   
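

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the upstream module). It assumes the
# `oci` package is installed and that the auth_type you pick has valid
# credentials configured; the endpoint URL, compartment OCID and model id
# below are placeholders you must replace with your own values, and the
# model_kwargs shown are examples that depend on the chosen provider.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    llm = OCIGenAI(
        model_id="cohere.command",  # or an "ocid1.generativeaiendpoint..." id for a dedicated endpoint
        service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
        compartment_id="MY_COMPARTMENT_OCID",  # placeholder
        auth_type="API_KEY",  # SECURITY_TOKEN / INSTANCE_PRINCIPAL / RESOURCE_PRINCIPAL also work
        model_kwargs={"temperature": 0.2, "max_tokens": 200},
    )

    # Single completion; stop words are mapped to the provider-specific key.
    print(llm.invoke("Tell me a joke.", stop=["\n\n"]))

    # Token-by-token streaming; each yielded item is a text fragment.
    for piece in llm.stream("Write one sentence about Oracle Cloud."):
        print(piece, end="", flush=True)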