"""Functional implementations for domain adaptation image transformations.

This module provides low-level functions and classes for performing domain adaptation
between images. It includes implementations for histogram matching, Fourier domain adaptation,
and pixel distribution matching with various normalization techniques.
"""

from __future__ import annotations

import abc
from copy import deepcopy
from typing import Literal

import cv2
import numpy as np
from albucore import (
    add_weighted,
    clip,
    clipped,
    from_float,
    get_num_channels,
    preserve_channel_dim,
    to_float,
    uint8_io,
)
from typing_extensions import Protocol

import albumentations.augmentations.geometric.functional as fgeometric
from albumentations.augmentations.utils import PCA
from albumentations.core.type_definitions import MONO_CHANNEL_DIMENSIONS

__all__ = [
    "adapt_pixel_distribution",
    "apply_histogram",
    "fourier_domain_adaptation",
]


class BaseScaler:
    def __init__(self) -> None:
        self.data_min: np.ndarray | None = None
        self.data_max: np.ndarray | None = None
        self.mean: np.ndarray | None = None
        self.var: np.ndarray | None = None
        self.scale: np.ndarray | None = None

    def fit(self, x: np.ndarray) -> None:
        raise NotImplementedError

    def transform(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError

    def fit_transform(self, x: np.ndarray) -> np.ndarray:
        self.fit(x)
        return self.transform(x)

    def inverse_transform(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError
S jrS
S jrSrU =r	$ )MinMaxScaler4   c                X   > [         TU ]  5         US   U l        US   U l        S U l        g )Nr      )superr    minmax
data_range)r   feature_range	__class__s     r   r    MinMaxScaler.__init__5   s-    '*'*-1r"   c                    [         R                  " USS9U l        [         R                  " USS9U l        U R                  U R                  -
  U l        SU R
                  U R
                  S:H  '   g Nr   axisrD   )nprF   r   rG   r   rH   r&   s     r   r(   MinMaxScaler.fit;   sQ    qq)qq)--$--7011,-r"   c                   U R                   b  U R                  b  U R                  c  [        S5      e[        R
                  " XR                   5      R                  [        5      n[        R                  " X R                  US9  [        R                  " X R                  U R                  -
  US9  [        R                  " X R                  US9  U$ )NpThis MinMaxScaler instance is not fitted yet. Call 'fit' with appropriate arguments before using this estimator.)out)r   r   rH   
ValueErrorrP   subtractastypefloatdividemultiplyrG   rF   addr   r'   x_stds      r   r,   MinMaxScaler.transformB   s    == DMM$9T__=TU 
 A}}-44U;
		%e4
EHHtxx/e<
uhhE*r"   c                   U R                   b  U R                  b  U R                  c  [        S5      eXR                  -
  U R
                  U R                  -
  -  R                  [        5      nX R                  -  U R                   -   $ )NrS   )r   r   rH   rU   rF   rG   rW   rX   r\   s      r   r2   MinMaxScaler.inverse_transformP   sq    == DMM$9T__=TU  hh,488dhh#67??F&66r"   )r   r   rH   rG   rF   ))g        g      ?)rI   ztuple[float, float]r5   r6   r7   r9   
r:   r;   r<   r=   r    r(   r,   r2   r>   __classcell__rJ   s   @r   rA   rA   4   s!    2 227 7r"   rA   c                  J   ^  \ rS rSrSU 4S jjrSS jrS	S jrS	S jrSrU =r	$ )
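

# Usage sketch for MinMaxScaler (illustrative): it operates column-wise on
# flattened (num_pixels, num_channels) arrays, and inverse_transform undoes
# transform once the scaler has been fitted.
#
#     >>> import numpy as np
#     >>> pixels = np.array([[0.0, 10.0], [5.0, 20.0], [10.0, 30.0]])
#     >>> scaler = MinMaxScaler()
#     >>> scaled = scaler.fit_transform(pixels)  # each column mapped into [0, 1]
#     >>> bool(np.allclose(scaler.inverse_transform(scaled), pixels))
#     True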


class StandardScaler(BaseScaler):
    def __init__(self) -> None:
        super().__init__()

    def fit(self, x: np.ndarray) -> None:
        self.mean = np.mean(x, axis=0)
        self.var = np.var(x, axis=0)
        self.scale = np.sqrt(self.var)
        # Guard against division by zero for constant columns.
        self.scale[self.scale == 0] = 1

    def transform(self, x: np.ndarray) -> np.ndarray:
        if self.mean is None or self.scale is None:
            raise ValueError(
                "This StandardScaler instance is not fitted yet. "
                "Call 'fit' with appropriate arguments before using this estimator.",
            )
        return (x - self.mean) / self.scale

    def inverse_transform(self, x: np.ndarray) -> np.ndarray:
        if self.mean is None or self.scale is None:
            raise ValueError(
                "This StandardScaler instance is not fitted yet. "
                "Call 'fit' with appropriate arguments before using this estimator.",
            )
        return x * self.scale + self.mean


class TransformerInterface(Protocol):
    @abc.abstractmethod
    def inverse_transform(self, x: np.ndarray) -> np.ndarray: ...

    @abc.abstractmethod
    def fit(self, x: np.ndarray, y: np.ndarray | None = None) -> np.ndarray: ...

    @abc.abstractmethod
    def transform(self, x: np.ndarray, y: np.ndarray | None = None) -> np.ndarray: ...
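

# StandardScaler, MinMaxScaler, and the PCA helper imported from
# albumentations.augmentations.utils all structurally satisfy
# TransformerInterface: they expose fit/transform/inverse_transform.
# Illustrative round trip:
#
#     >>> import numpy as np
#     >>> x = np.array([[1.0, 2.0], [3.0, 6.0], [5.0, 10.0]])
#     >>> scaler = StandardScaler()
#     >>> z = scaler.fit_transform(x)  # zero mean, unit variance per column
#     >>> bool(np.allclose(z.mean(axis=0), 0.0))
#     True
#     >>> bool(np.allclose(scaler.inverse_transform(z), x))
#     True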


class DomainAdapter:
    def __init__(
        self,
        transformer: TransformerInterface,
        ref_img: np.ndarray,
        color_conversions: tuple[None, None] = (None, None),
    ) -> None:
        self.color_in, self.color_out = color_conversions
        self.source_transformer = deepcopy(transformer)
        self.target_transformer = transformer
        self.num_channels = get_num_channels(ref_img)
        # Fit the target transformer on the reference image once, up front.
        self.target_transformer.fit(self.flatten(ref_img))

    def to_colorspace(self, img: np.ndarray) -> np.ndarray:
        return img if self.color_in is None else cv2.cvtColor(img, self.color_in)

    def from_colorspace(self, img: np.ndarray) -> np.ndarray:
        if self.color_out is None:
            return img
        return cv2.cvtColor(clip(img, np.uint8, inplace=True), self.color_out)

    def flatten(self, img: np.ndarray) -> np.ndarray:
        # Convert to the working colorspace and reshape to (num_pixels, num_channels).
        img = self.to_colorspace(img)
        img = to_float(img)
        return img.reshape(-1, self.num_channels)

    def reconstruct(self, pixels: np.ndarray, height: int, width: int) -> np.ndarray:
        pixels = clip(pixels, np.float32, inplace=True)
        if self.num_channels == 1:
            return self.from_colorspace(pixels.reshape(height, width))
        return self.from_colorspace(pixels.reshape(height, width, self.num_channels))

    @staticmethod
    def _pca_sign(x) -> np.ndarray:
        return np.sign(np.trace(x.components_))

    def __call__(self, image: np.ndarray) -> np.ndarray:
        height, width = image.shape[:2]
        pixels = self.flatten(image)
        self.source_transformer.fit(pixels)

        # PCA components are sign-ambiguous; align the two fitted bases so the
        # latent representations are comparable.
        if (
            hasattr(self.target_transformer, "components_")
            and hasattr(self.source_transformer, "components_")
            and self._pca_sign(self.target_transformer) != self._pca_sign(self.source_transformer)
        ):
            self.target_transformer.components_ *= -1

        # Encode with the source transformer, decode with the target one.
        representation = self.source_transformer.transform(pixels)
        result = self.target_transformer.inverse_transform(representation)
        return self.reconstruct(result, height, width)
V 0 07r"   r   c                ^   U R                   UR                   :w  a  [        S5      e[        U 5      n[        U5      nXE:w  a  [        S5      eUS:X  a,  [        R                  " U 5      n [        R                  " U5      nU R
                  UR
                  :w  a0  [        R                  " XR
                  SS [        R                  S9nU R                   nU[        R                  :X  a4  [        U [        R                  5      n [        U[        R                  5      n[        [        [        S.U   " 5       n[        XqS9nU" U 5      R!                  [        R                  5      n	U R!                  [        R                  5      SU-
  -  X-  -   n
U[        R                  :X  a  U
$ [#        U
5      $ )	ab  Adapt the pixel distribution of an image to match a reference image.

This function adapts the pixel distribution of an image to match a reference image
using a specified transformation type and weight.

Args:
    img (np.ndarray): The input image to be adapted.
    ref (np.ndarray): The reference image.
    transform_type (Literal["pca", "standard", "minmax"]): The type of transformation to use.
    weight (float): The weight of the transformation.

Returns:
    np.ndarray: The adapted image.

Raises:
    ValueError: If the input image and reference image have different dtypes or numbers of channels.

z9Input image and reference image must have the same dtype.zFInput image and reference image must have the same number of channels.rD   Nr   )dsizeinterpolation)pcastandardminmax)r   r   )dtyperU   r
   rP   squeezer   r   resize
INTER_AREAfloat32r	   r   r   re   rA   r   rW   r   )r   reftransform_typeweightimg_num_channelsref_num_channelsoriginal_dtyper   adaptertransformedr   s              r   r   r      sB   4 yyCIITUU',',+abb1jjojjo
yyCIIjjIIbqMPYYN#bhh'bhh'>\RSabdKAG#,%%bjj1KZZ

#q6z2[5IIF#rxx/6EXf5EEr"   c                   U R                   S S n[        [        R                  " [	        U5      U-  5      5      n[
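

# Illustrative example, mirroring the docstring above:
#
#     >>> import numpy as np
#     >>> rng = np.random.default_rng(0)
#     >>> img = rng.integers(0, 256, (100, 100, 3), dtype=np.uint8)
#     >>> ref = rng.integers(0, 256, (100, 100, 3), dtype=np.uint8)
#     >>> out = adapt_pixel_distribution(img, ref, transform_type="pca", weight=0.5)
#     >>> bool(out.shape == img.shape and out.dtype == img.dtype)
#     True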


def low_freq_mutate(amp_src: np.ndarray, amp_trg: np.ndarray, beta: float) -> np.ndarray:
    image_shape = amp_src.shape[:2]
    border = int(np.floor(min(image_shape) * beta))
    center_x, center_y = fgeometric.center(image_shape)
    height, width = image_shape
    h1, h2 = max(0, int(center_y - border)), min(int(center_y + border), height)
    w1, w2 = max(0, int(center_x - border)), min(int(center_x + border), width)
    # Replace the centred low-frequency block of the source amplitude with the target's.
    amp_src[h1:h2, w1:w2] = amp_trg[h1:h2, w1:w2]
    return amp_src
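

# What low_freq_mutate does, numerically: only a centred block of roughly
# (2 * border) x (2 * border) amplitude values is taken from the target
# (border = floor(min(height, width) * beta)).
#
#     >>> import numpy as np
#     >>> mutated = low_freq_mutate(np.zeros((8, 8)), np.ones((8, 8)), beta=0.25)
#     >>> int(mutated.sum())  # a 4x4 central block copied from the target
#     16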


@clipped
@preserve_channel_dim
def fourier_domain_adaptation(img: np.ndarray, target_img: np.ndarray, beta: float) -> np.ndarray:
    """Apply Fourier Domain Adaptation to the input image using a target image.

    This function performs domain adaptation in the frequency domain by modifying the amplitude
    spectrum of the source image based on the target image's amplitude spectrum. It preserves
    the phase information of the source image, which helps maintain its content while adapting
    its style to match the target image.

    Args:
        img (np.ndarray): The source image to be adapted. Can be grayscale or RGB.
        target_img (np.ndarray): The target image used as a reference for adaptation.
            Should have the same dimensions as the source image.
        beta (float): The adaptation strength, typically in the range [0, 1].
            Higher values result in stronger adaptation towards the target image's style.

    Returns:
        np.ndarray: The adapted image with the same shape and type as the input image.

    Raises:
        ValueError: If the source and target images have different shapes.

    Note:
        - Both input images are converted to float32 for processing.
        - The function handles both grayscale (2D) and color (3D) images.
        - For grayscale images, an extra dimension is added to facilitate uniform processing.
        - The adaptation is performed channel-wise for color images.
        - The output is clipped to the valid range and preserves the original number of channels.

    The adaptation process involves the following steps for each channel:
    1. Compute the 2D Fourier Transform of both source and target images.
    2. Shift the zero frequency component to the center of the spectrum.
    3. Extract amplitude and phase information from the source image's spectrum.
    4. Mutate the source amplitude using the target amplitude and the beta parameter.
    5. Combine the mutated amplitude with the original phase.
    6. Perform the inverse Fourier Transform to obtain the adapted channel.

    The `low_freq_mutate` function (defined above) is responsible for the actual
    amplitude mutation, focusing on low-frequency components which carry style information.

    Example:
        >>> import numpy as np
        >>> import albumentations as A
        >>> source_img = np.random.rand(100, 100, 3).astype(np.float32)
        >>> target_img = np.random.rand(100, 100, 3).astype(np.float32)
        >>> adapted_img = A.fourier_domain_adaptation(source_img, target_img, beta=0.5)
        >>> assert adapted_img.shape == source_img.shape

    References:
        FDA: Fourier Domain Adaptation for Semantic Segmentation: Yang and Soatto, 2020, CVPR
            https://openaccess.thecvf.com/content_CVPR_2020/papers/Yang_FDA_Fourier_Domain_Adaptation_for_Semantic_Segmentation_CVPR_2020_paper.pdf

    """
    src_img = img.astype(np.float32)
    trg_img = target_img.astype(np.float32)

    if src_img.ndim == MONO_CHANNEL_DIMENSIONS:
        src_img = np.expand_dims(src_img, axis=-1)
    if trg_img.ndim == MONO_CHANNEL_DIMENSIONS:
        trg_img = np.expand_dims(trg_img, axis=-1)

    num_channels = src_img.shape[-1]

    # Prepare container for the output image
    src_in_trg = np.zeros_like(src_img)

    for channel_id in range(num_channels):
        # Perform FFT on each channel
        fft_src = np.fft.fft2(src_img[:, :, channel_id])
        fft_trg = np.fft.fft2(trg_img[:, :, channel_id])

        # Shift the zero frequency component to the center of the spectrum
        fft_src_shifted = np.fft.fftshift(fft_src)
        fft_trg_shifted = np.fft.fftshift(fft_trg)

        # Extract amplitude and phase
        amp_src, pha_src = np.abs(fft_src_shifted), np.angle(fft_src_shifted)
        amp_trg = np.abs(fft_trg_shifted)

        # Mutate the amplitude part of the source with the target
        mutated_amp = low_freq_mutate(amp_src.copy(), amp_trg, beta)

        # Combine the mutated amplitude with the original phase and shift back
        fft_src_mutated = np.fft.ifftshift(mutated_amp * np.exp(1j * pha_src))

        # Perform inverse FFT and keep the real part
        src_in_trg_channel = np.fft.ifft2(fft_src_mutated)
        src_in_trg[:, :, channel_id] = np.real(src_in_trg_channel)

    return src_in_trg
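

# The per-channel recombination above relies on the identity
# fft == abs(fft) * exp(1j * angle(fft)); a quick numerical check:
#
#     >>> import numpy as np
#     >>> f = np.fft.fft2(np.arange(16.0).reshape(4, 4))
#     >>> bool(np.allclose(np.abs(f) * np.exp(1j * np.angle(f)), f))
#     True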


@clipped
@preserve_channel_dim
def apply_histogram(img: np.ndarray, reference_image: np.ndarray, blend_ratio: float) -> np.ndarray:
    """Apply histogram matching to an input image using a reference image and blend the result.

    This function performs histogram matching between the input image and a reference image,
    then blends the result with the original input image based on the specified blend ratio.

    Args:
        img (np.ndarray): The input image to be transformed. Can be either grayscale or RGB.
            Supported dtypes: uint8, float32 (values should be in [0, 1] range).
        reference_image (np.ndarray): The reference image used for histogram matching.
            Should have the same number of channels as the input image.
            Supported dtypes: uint8, float32 (values should be in [0, 1] range).
        blend_ratio (float): The ratio for blending the matched image with the original image.
            Should be in the range [0, 1], where 0 means no change and 1 means full histogram matching.

    Returns:
        np.ndarray: The transformed image after histogram matching and blending.
            The output will have the same shape and dtype as the input image.

    Supported image types:
        - Grayscale images: 2D arrays
        - RGB images: 3D arrays with 3 channels
        - Multispectral images: 3D arrays with more than 3 channels

    Note:
        - If the input and reference images have different sizes, the reference image
          will be resized to match the input image's dimensions.
        - The function uses a custom implementation of histogram matching based on OpenCV and NumPy.
        - The @clipped and @preserve_channel_dim decorators ensure the output is within
          the valid range and maintains the original number of dimensions.

    """
    # Resize the reference only when the spatial sizes differ; cv2 expects (width, height).
    if img.shape[:2] != reference_image.shape[:2]:
        reference_image = cv2.resize(reference_image, dsize=(img.shape[1], img.shape[0]))

    img = np.squeeze(img)
    reference_image = np.squeeze(reference_image)

    # Match histograms channel-wise, then blend the result with the original image.
    matched = match_histograms(img, reference_image)
    return add_weighted(matched, blend_ratio, img, 1 - blend_ratio)
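

# Illustrative example: blend_ratio=0.0 returns the input unchanged, while 1.0
# fully adopts the reference histogram; sizes may differ between the two images.
#
#     >>> import numpy as np
#     >>> rng = np.random.default_rng(0)
#     >>> img = rng.integers(0, 256, (50, 50, 3), dtype=np.uint8)
#     >>> ref = rng.integers(0, 256, (80, 80, 3), dtype=np.uint8)
#     >>> out = apply_histogram(img, ref, blend_ratio=0.5)
#     >>> bool(out.shape == img.shape)
#     True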


@uint8_io
@preserve_channel_dim
def match_histograms(image: np.ndarray, reference: np.ndarray) -> np.ndarray:
    """Adjust an image so that its cumulative histogram matches that of another.

    The adjustment is applied separately for each channel.

    Args:
        image (np.ndarray): Input image. Can be gray-scale or in color.
        reference (np.ndarray): Image to match histogram of. Must have the same number of channels as image.

    Returns:
        np.ndarray: Transformed input image.

    Raises:
        ValueError: Thrown when the number of dimensions of the input image and the reference differ.

    """
    if reference.dtype != np.uint8:
        reference = from_float(reference, np.uint8)

    if image.ndim != reference.ndim:
        raise ValueError("Image and reference must have the same number of dimensions.")

    # Add a channel axis to grayscale inputs so processing is uniform.
    if image.ndim == MONO_CHANNEL_DIMENSIONS:
        image = np.expand_dims(image, axis=-1)
    if reference.ndim == MONO_CHANNEL_DIMENSIONS:
        reference = np.expand_dims(reference, axis=-1)

    matched = np.empty(image.shape, dtype=np.uint8)
    num_channels = image.shape[-1]

    for channel in range(num_channels):
        matched_channel = _match_cumulative_cdf(image[..., channel], reference[..., channel]).astype(np.uint8)
        matched[..., channel] = matched_channel

    return matched
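

# match_histograms is the building block behind apply_histogram above; it works
# on uint8 data (the reference is converted when needed) and matches each
# channel independently. Illustrative shape/dtype check:
#
#     >>> import numpy as np
#     >>> rng = np.random.default_rng(0)
#     >>> image = rng.integers(0, 256, (32, 32, 3), dtype=np.uint8)
#     >>> reference = np.full((32, 32, 3), 128, dtype=np.uint8)
#     >>> out = match_histograms(image, reference)
#     >>> bool(out.shape == image.shape and out.dtype == np.uint8)
#     True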


def _match_cumulative_cdf(source: np.ndarray, template: np.ndarray) -> np.ndarray:
    src_lookup = source.reshape(-1)
    src_counts = np.bincount(src_lookup)
    tmpl_counts = np.bincount(template.reshape(-1))

    # Omit values where the count was 0.
    tmpl_values = np.nonzero(tmpl_counts)[0]
    tmpl_counts = tmpl_counts[tmpl_values]

    # Calculate normalized quantiles for each array.
    src_quantiles = np.cumsum(src_counts) / source.size
    tmpl_quantiles = np.cumsum(tmpl_counts) / template.size

    # Map each source quantile to the template value with the nearest quantile.
    interp_a_values = np.interp(src_quantiles, tmpl_quantiles, tmpl_values)
    return interp_a_values[src_lookup].reshape(source.shape).astype(np.uint8)
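

# The CDF-matching helper maps each source intensity to the template intensity
# with the closest cumulative frequency. Tiny worked example:
#
#     >>> import numpy as np
#     >>> src = np.array([[0, 0], [1, 1]], dtype=np.uint8)
#     >>> tmpl = np.array([[10, 10], [20, 20]], dtype=np.uint8)
#     >>> _match_cumulative_cdf(src, tmpl)
#     array([[10, 10],
#            [20, 20]], dtype=uint8)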