
from __future__ import annotations

import sys
from typing import TYPE_CHECKING, Optional, cast
from argparse import ArgumentParser
from functools import partial

from openai.types.completion import Completion

from .._utils import get_client
from ..._types import NOT_GIVEN, NotGivenOr
from ..._utils import is_given
from .._errors import CLIError
from .._models import BaseModel
from ..._streaming import Stream

if TYPE_CHECKING:
    from argparse import _SubParsersAction


def register(subparser: _SubParsersAction[ArgumentParser]) -> None:
    sub = subparser.add_parser("completions.create")

    # Required arguments
    sub.add_argument("-m", "--model", help="The model to use", required=True)

    # Optional arguments
    sub.add_argument("-p", "--prompt", help="An optional prompt to complete from")
    sub.add_argument("--stream", help="Stream tokens as they're ready.", action="store_true")
    sub.add_argument("-M", "--max-tokens", help="The maximum number of tokens to generate", type=int)
    sub.add_argument(
        "-t",
        "--temperature",
        help="""What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

Mutually exclusive with `top_p`.""",
        type=float,
    )
    sub.add_argument(
        "-P",
        "--top_p",
        help="""An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10%% probability mass are considered.

Mutually exclusive with `temperature`.""",
        type=float,
    )
    sub.add_argument("-n", "--n", help="How many sub-completions to generate for each prompt.", type=int)
    sub.add_argument(
        "--logprobs",
        help="Include the log probabilities on the `logprobs` most likely tokens, as well as the chosen tokens. So for example, if `logprobs` is 10, the API will return a list of the 10 most likely tokens. If `logprobs` is 0, only the chosen tokens will have logprobs returned.",
        type=int,
    )
    sub.add_argument(
        "--best_of",
        help="Generates `best_of` completions server-side and returns the 'best' (the one with the highest log probability per token). Results cannot be streamed.",
        type=int,
    )
    sub.add_argument("--echo", help="Echo back the prompt in addition to the completion", action="store_true")
    sub.add_argument(
        "--frequency_penalty",
        help="Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.",
        type=float,
    )
    sub.add_argument(
        "--presence_penalty",
        help="Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.",
        type=float,
    )
    sub.add_argument("--suffix", help="The suffix that comes after a completion of inserted text.")
    sub.add_argument("--stop", help="A stop sequence at which to stop generating tokens.")
    sub.add_argument(
        "--user",
        help="A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.",
    )
    sub.set_defaults(func=CLICompletions.create, args_model=CLICompletionCreateArgs)


class CLICompletionCreateArgs(BaseModel):
    model: str
    stream: bool = False

    prompt: Optional[str] = None
    n: NotGivenOr[int] = NOT_GIVEN
    stop: NotGivenOr[str] = NOT_GIVEN
    user: NotGivenOr[str] = NOT_GIVEN
    echo: NotGivenOr[bool] = NOT_GIVEN
    suffix: NotGivenOr[str] = NOT_GIVEN
    best_of: NotGivenOr[int] = NOT_GIVEN
    top_p: NotGivenOr[float] = NOT_GIVEN
    logprobs: NotGivenOr[int] = NOT_GIVEN
    max_tokens: NotGivenOr[int] = NOT_GIVEN
    temperature: NotGivenOr[float] = NOT_GIVEN
    presence_penalty: NotGivenOr[float] = NOT_GIVEN
    frequency_penalty: NotGivenOr[float] = NOT_GIVEN


class CLICompletions:
    @staticmethod
    def create(args: CLICompletionCreateArgs) -> None:
        if is_given(args.n) and args.n > 1 and args.stream:
            raise CLIError("Can't stream completions with n>1 with the current CLI")

        make_request = partial(
            get_client().completions.create,
            n=args.n,
            echo=args.echo,
            stop=args.stop,
            user=args.user,
            model=args.model,
            top_p=args.top_p,
            prompt=args.prompt,
            suffix=args.suffix,
            best_of=args.best_of,
            logprobs=args.logprobs,
            max_tokens=args.max_tokens,
            temperature=args.temperature,
            presence_penalty=args.presence_penalty,
            frequency_penalty=args.frequency_penalty,
        )

        if args.stream:
            return CLICompletions._stream_create(
                cast(Stream[Completion], make_request(stream=True))
            )

        return CLICompletions._create(make_request())

    @staticmethod
    def _create(completion: Completion) -> None:
        should_print_header = len(completion.choices) > 1
        for choice in completion.choices:
            if should_print_header:
                sys.stdout.write("===== Completion {} =====\n".format(choice.index))

            sys.stdout.write(choice.text)

            if should_print_header or not choice.text.endswith("\n"):
                sys.stdout.write("\n")

            sys.stdout.flush()

    @staticmethod
    def _stream_create(stream: Stream[Completion]) -> None:
        for completion in stream:
            should_print_header = len(completion.choices) > 1
            for choice in sorted(completion.choices, key=lambda c: c.index):
                if should_print_header:
                    sys.stdout.write("===== Chat Completion {} =====\n".format(choice.index))

                sys.stdout.write(choice.text)

                if should_print_header:
                    sys.stdout.write("\n")

                sys.stdout.flush()

        sys.stdout.write("\n")