
from __future__ import annotations

from typing import List, Union, Mapping, Optional, cast
from typing_extensions import Literal

import httpx

from .. import _legacy_response
from ..types import image_edit_params, image_generate_params, image_create_variation_params
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
from .._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from .._base_client import make_request_options
from ..types.image_model import ImageModel
from ..types.images_response import ImagesResponse

__all__ = ["Images", "AsyncImages"]


class Images(SyncAPIResource):
    @cached_property
    def with_raw_response(self) -> ImagesWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
        """
        return ImagesWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> ImagesWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/openai/openai-python#with_streaming_response
        """
        return ImagesWithStreamingResponse(self)

    def create_variation(
        self,
        *,
        image: FileTypes,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """
        Creates a variation of a given image.

        This endpoint only supports `dall-e-2`.

        Args:
          image: The image to use as the basis for the variation(s). Must be a valid PNG file,
              less than 4MB, and square.

          model: The model to use for image generation. Only `dall-e-2` is supported at this
              time.

          n: The number of images to generate. Must be between 1 and 10.

          response_format: The format in which the generated images are returned. Must be one of `url` or
              `b64_json`. URLs are only valid for 60 minutes after the image has been
              generated.

          size: The size of the generated images. Must be one of `256x256`, `512x512`, or
              `1024x1024`.

          user: A unique identifier representing your end-user, which can help OpenAI to monitor
              and detect abuse.
              [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        body = deepcopy_minimal(
            {
                "image": image,
                "model": model,
                "n": n,
                "response_format": response_format,
                "size": size,
                "user": user,
            }
        )
        # The image is uploaded as multipart/form-data rather than JSON.
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return self._post(
            "/images/variations",
            body=maybe_transform(body, image_create_variation_params.ImageCreateVariationParams),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )

    def edit(
        self,
        *,
        image: Union[FileTypes, List[FileTypes]],
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        mask: FileTypes | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[
            Literal["256x256", "512x512", "1024x1024", "1536x1024", "1024x1536", "auto"]
        ] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """Creates an edited or extended image given one or more source images and a
        prompt.

        This endpoint only supports `gpt-image-1` and `dall-e-2`.

        Args:
          image: The image(s) to edit. Must be a supported image file or an array of images.

              For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than
              50MB. You can provide up to 16 images.

              For `dall-e-2`, you can only provide one image, and it should be a square `png`
              file less than 4MB.

          prompt: A text description of the desired image(s). The maximum length is 1000
              characters for `dall-e-2`, and 32000 characters for `gpt-image-1`.

          background: Allows to set transparency for the background of the generated image(s). This
              parameter is only supported for `gpt-image-1`. Must be one of `transparent`,
              `opaque` or `auto` (default value). When `auto` is used, the model will
              automatically determine the best background for the image.

              If `transparent`, the output format needs to support transparency, so it should
              be set to either `png` (default value) or `webp`.

          mask: An additional image whose fully transparent areas (e.g. where alpha is zero)
              indicate where `image` should be edited. If there are multiple images provided,
              the mask will be applied on the first image. Must be a valid PNG file, less than
              4MB, and have the same dimensions as `image`.

          model: The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are
              supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1`
              is used.

          n: The number of images to generate. Must be between 1 and 10.

          output_compression: The compression level (0-100%) for the generated images. This parameter is only
              supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and
              defaults to 100.

          output_format: The format in which the generated images are returned. This parameter is only
              supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The
              default value is `png`.

          quality: The quality of the image that will be generated. `high`, `medium` and `low` are
              only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality.
              Defaults to `auto`.

          response_format: The format in which the generated images are returned. Must be one of `url` or
              `b64_json`. URLs are only valid for 60 minutes after the image has been
              generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1`
              will always return base64-encoded images.

          size: The size of the generated images. Must be one of `1024x1024`, `1536x1024`
              (landscape), `1024x1536` (portrait), or `auto` (default value) for
              `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`.

          user: A unique identifier representing your end-user, which can help OpenAI to monitor
              and detect abuse.
              [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        body = deepcopy_minimal(
            {
                "image": image,
                "prompt": prompt,
                "background": background,
                "mask": mask,
                "model": model,
                "n": n,
                "output_compression": output_compression,
                "output_format": output_format,
                "quality": quality,
                "response_format": response_format,
                "size": size,
                "user": user,
            }
        )
        # `image` may be a single file or a list of files; the optional mask is uploaded alongside it.
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"], ["image", "<array>"], ["mask"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return self._post(
            "/images/edits",
            body=maybe_transform(body, image_edit_params.ImageEditParams),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )

    def generate(
        self,
        *,
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        moderation: Optional[Literal["low", "auto"]] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "hd", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[
            Literal["auto", "1024x1024", "1536x1024", "1024x1536", "256x256", "512x512", "1792x1024", "1024x1792"]
        ] | NotGiven = NOT_GIVEN,
        style: Optional[Literal["vivid", "natural"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """
        Creates an image given a prompt.
        [Learn more](https://platform.openai.com/docs/guides/images).

        Args:
          prompt: A text description of the desired image(s). The maximum length is 32000
              characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters
              for `dall-e-3`.

          background: Allows to set transparency for the background of the generated image(s). This
              parameter is only supported for `gpt-image-1`. Must be one of `transparent`,
              `opaque` or `auto` (default value). When `auto` is used, the model will
              automatically determine the best background for the image.

              If `transparent`, the output format needs to support transparency, so it should
              be set to either `png` (default value) or `webp`.

          model: The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or
              `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to
              `gpt-image-1` is used.

          moderation: Control the content-moderation level for images generated by `gpt-image-1`. Must
              be either `low` for less restrictive filtering or `auto` (default value).

          n: The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
              `n=1` is supported.

          output_compression: The compression level (0-100%) for the generated images. This parameter is only
              supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and
              defaults to 100.

          output_format: The format in which the generated images are returned. This parameter is only
              supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`.

          quality: The quality of the image that will be generated.

              - `auto` (default value) will automatically select the best quality for the
                given model.
              - `high`, `medium` and `low` are supported for `gpt-image-1`.
              - `hd` and `standard` are supported for `dall-e-3`.
              - `standard` is the only option for `dall-e-2`.

          response_format: The format in which generated images with `dall-e-2` and `dall-e-3` are
              returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes
              after the image has been generated. This parameter isn't supported for
              `gpt-image-1` which will always return base64-encoded images.

          size: The size of the generated images. Must be one of `1024x1024`, `1536x1024`
              (landscape), `1024x1536` (portrait), or `auto` (default value) for
              `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and
              one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`.

          style: The style of the generated images. This parameter is only supported for
              `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean
              towards generating hyper-real and dramatic images. Natural causes the model to
              produce more natural, less hyper-real looking images.

          user: A unique identifier representing your end-user, which can help OpenAI to monitor
              and detect abuse.
              [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        return self._post(
            "/images/generations",
            body=maybe_transform(
                {
                    "prompt": prompt,
                    "background": background,
                    "model": model,
                    "moderation": moderation,
                    "n": n,
                    "output_compression": output_compression,
                    "output_format": output_format,
                    "quality": quality,
                    "response_format": response_format,
                    "size": size,
                    "style": style,
                    "user": user,
                },
                image_generate_params.ImageGenerateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )


class AsyncImages(AsyncAPIResource):
    """Async counterpart of `Images`; the methods mirror the sync surface one-for-one."""

    @cached_property
    def with_raw_response(self) -> AsyncImagesWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
        """
        return AsyncImagesWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncImagesWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/openai/openai-python#with_streaming_response
        """
        return AsyncImagesWithStreamingResponse(self)

    async def create_variation(
        self,
        *,
        image: FileTypes,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[Literal["256x256", "512x512", "1024x1024"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """
        Creates a variation of a given image.

        This endpoint only supports `dall-e-2`. The arguments are documented on
        `Images.create_variation` above.
        """
        body = deepcopy_minimal(
            {
                "image": image,
                "model": model,
                "n": n,
                "response_format": response_format,
                "size": size,
                "user": user,
            }
        )
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return await self._post(
            "/images/variations",
            body=await async_maybe_transform(body, image_create_variation_params.ImageCreateVariationParams),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )

    async def edit(
        self,
        *,
        image: Union[FileTypes, List[FileTypes]],
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        mask: FileTypes | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[
            Literal["256x256", "512x512", "1024x1024", "1536x1024", "1024x1536", "auto"]
        ] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """Creates an edited or extended image given one or more source images and a
        prompt.

        This endpoint only supports `gpt-image-1` and `dall-e-2`. The arguments are
        documented on `Images.edit` above.
        """
        body = deepcopy_minimal(
            {
                "image": image,
                "prompt": prompt,
                "background": background,
                "mask": mask,
                "model": model,
                "n": n,
                "output_compression": output_compression,
                "output_format": output_format,
                "quality": quality,
                "response_format": response_format,
                "size": size,
                "user": user,
            }
        )
        files = extract_files(cast(Mapping[str, object], body), paths=[["image"], ["image", "<array>"], ["mask"]])
        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
        return await self._post(
            "/images/edits",
            body=await async_maybe_transform(body, image_edit_params.ImageEditParams),
            files=files,
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )

    async def generate(
        self,
        *,
        prompt: str,
        background: Optional[Literal["transparent", "opaque", "auto"]] | NotGiven = NOT_GIVEN,
        model: Union[str, ImageModel, None] | NotGiven = NOT_GIVEN,
        moderation: Optional[Literal["low", "auto"]] | NotGiven = NOT_GIVEN,
        n: Optional[int] | NotGiven = NOT_GIVEN,
        output_compression: Optional[int] | NotGiven = NOT_GIVEN,
        output_format: Optional[Literal["png", "jpeg", "webp"]] | NotGiven = NOT_GIVEN,
        quality: Optional[Literal["standard", "hd", "low", "medium", "high", "auto"]] | NotGiven = NOT_GIVEN,
        response_format: Optional[Literal["url", "b64_json"]] | NotGiven = NOT_GIVEN,
        size: Optional[
            Literal["auto", "1024x1024", "1536x1024", "1024x1536", "256x256", "512x512", "1792x1024", "1024x1792"]
        ] | NotGiven = NOT_GIVEN,
        style: Optional[Literal["vivid", "natural"]] | NotGiven = NOT_GIVEN,
        user: str | NotGiven = NOT_GIVEN,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> ImagesResponse:
        """
        Creates an image given a prompt.
        [Learn more](https://platform.openai.com/docs/guides/images). The arguments
        are documented on `Images.generate` above.
        """
        return await self._post(
            "/images/generations",
            body=await async_maybe_transform(
                {
                    "prompt": prompt,
                    "background": background,
                    "model": model,
                    "moderation": moderation,
                    "n": n,
                    "output_compression": output_compression,
                    "output_format": output_format,
                    "quality": quality,
                    "response_format": response_format,
                    "size": size,
                    "style": style,
                    "user": user,
                },
                image_generate_params.ImageGenerateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=ImagesResponse,
        )


class ImagesWithRawResponse:
    def __init__(self, images: Images) -> None:
        self._images = images

        self.create_variation = _legacy_response.to_raw_response_wrapper(
            images.create_variation,
        )
        self.edit = _legacy_response.to_raw_response_wrapper(
            images.edit,
        )
        self.generate = _legacy_response.to_raw_response_wrapper(
            images.generate,
        )


class AsyncImagesWithRawResponse:
    def __init__(self, images: AsyncImages) -> None:
        self._images = images

        self.create_variation = _legacy_response.async_to_raw_response_wrapper(
            images.create_variation,
        )
        self.edit = _legacy_response.async_to_raw_response_wrapper(
            images.edit,
        )
        self.generate = _legacy_response.async_to_raw_response_wrapper(
            images.generate,
        )


class ImagesWithStreamingResponse:
    def __init__(self, images: Images) -> None:
        self._images = images

        self.create_variation = to_streamed_response_wrapper(
            images.create_variation,
        )
        self.edit = to_streamed_response_wrapper(
            images.edit,
        )
        self.generate = to_streamed_response_wrapper(
            images.generate,
        )


class AsyncImagesWithStreamingResponse:
    def __init__(self, images: AsyncImages) -> None:
        self._images = images

        self.create_variation = async_to_streamed_response_wrapper(
            images.create_variation,
        )
        self.edit = async_to_streamed_response_wrapper(
            images.edit,
        )
        self.generate = async_to_streamed_response_wrapper(
            images.generate,
        )
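# ---------------------------------------------------------------------------
# Usage sketch: generating an image with the resource defined above. This is
# illustrative only; it assumes an `OPENAI_API_KEY` in the environment, and the
# prompt text is made up.
#
#   from openai import OpenAI
#
#   client = OpenAI()
#   result = client.images.generate(
#       model="gpt-image-1",
#       prompt="A watercolor lighthouse at dawn",
#       size="1024x1024",
#   )
#   # `gpt-image-1` always returns base64-encoded data; `dall-e-2`/`dall-e-3`
#   # can return URLs instead when `response_format="url"`.
#   image_b64 = result.data[0].b64_json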
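# Usage sketch: editing an image and creating a variation. Both endpoints
# upload files as multipart/form-data, so open binary file handles (or raw
# bytes) are passed directly; the file names below are placeholders and
# `client` is the one constructed in the previous sketch.
#
#   with open("original.png", "rb") as src, open("mask.png", "rb") as mask_file:
#       edited = client.images.edit(
#           model="gpt-image-1",
#           image=src,
#           mask=mask_file,
#           prompt="Replace the sky with a dramatic sunset",
#       )
#
#   with open("original.png", "rb") as src:
#       variations = client.images.create_variation(image=src, n=2, size="512x512")
#   urls = [img.url for img in variations.data]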
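# Usage sketch: the wrapper classes at the bottom of this module change what a
# call returns rather than what it sends. `with_raw_response` exposes the HTTP
# headers plus a `.parse()` step, `with_streaming_response` defers reading the
# body, and `AsyncImages` mirrors the same surface for `AsyncOpenAI`. Prompts
# below are made up.
#
#   raw = client.images.with_raw_response.generate(model="dall-e-3", prompt="A tiny robot")
#   request_id = raw.headers.get("x-request-id")
#   parsed = raw.parse()  # ImagesResponse
#
#   import asyncio
#   from openai import AsyncOpenAI
#
#   async def main() -> None:
#       aclient = AsyncOpenAI()
#       out = await aclient.images.generate(model="dall-e-3", prompt="A tiny robot")
#       print(out.data[0].url)
#
#   asyncio.run(main())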