"""
This module provides functionality for hyperparameter tuning of Ultralytics YOLO models for object detection, instance
segmentation, image classification, pose estimation, and multi-object tracking.

Hyperparameter tuning is the process of systematically searching for the optimal set of hyperparameters
that yield the best model performance. This is particularly crucial in deep learning models like YOLO,
where small changes in hyperparameters can lead to significant differences in model accuracy and efficiency.

Examples:
    Tune hyperparameters for YOLO11n on COCO8 at imgsz=640 and epochs=10 for 300 tuning iterations.
    >>> from ultralytics import YOLO
    >>> model = YOLO("yolo11n.pt")
    >>> model.tune(data="coco8.yaml", epochs=10, iterations=300, optimizer="AdamW", plots=False, save=False, val=False)
    N)get_cfgget_save_dir)DEFAULT_CFGLOGGER	callbackscolorstrremove_colorstr
yaml_print	yaml_save)plot_tune_resultsc                   @   s2   e Zd ZdZedfddZdd	d
ZdddZdS )Tunerap  
    A class for hyperparameter tuning of YOLO models.

    The class evolves YOLO model hyperparameters over a given number of iterations by mutating them according to the
    search space and retraining the model to evaluate their performance.

    Attributes:
        space (dict): Hyperparameter search space containing bounds and scaling factors for mutation.
        tune_dir (Path): Directory where evolution logs and results will be saved.
        tune_csv (Path): Path to the CSV file where evolution logs are saved.
        args (dict): Configuration arguments for the tuning process.
        callbacks (list): Callback functions to be executed during tuning.
        prefix (str): Prefix string for logging messages.

    Methods:
        _mutate: Mutates the given hyperparameters within the specified bounds.
        __call__: Executes the hyperparameter evolution across multiple iterations.

    Examples:
        Tune hyperparameters for YOLO11n on COCO8 at imgsz=640 and epochs=10 for 300 tuning iterations.
        >>> from ultralytics import YOLO
        >>> model = YOLO("yolo11n.pt")
        >>> model.tune(
        ...     data="coco8.yaml", epochs=10, iterations=300, optimizer="AdamW", plots=False, save=False, val=False
        ... )

        Tune with custom search space.
        >>> model.tune(space={key1: val1, key2: val2})  # custom search space dictionary
    """

    def __init__(self, args=DEFAULT_CFG, _callbacks=None):
        """
        Initialize the Tuner with configurations.

        Args:
            args (dict): Configuration for hyperparameter evolution.
            _callbacks (list, optional): Callback functions to be executed during tuning.
        self.space = args.pop("space", None) or {  # key: (min, max, gain(optional))
            "lr0": (1e-5, 1e-1),  # initial learning rate
            "lrf": (0.0001, 0.1),  # final OneCycleLR learning rate (lr0 * lrf)
            "momentum": (0.7, 0.98, 0.3),  # SGD momentum/Adam beta1
            "weight_decay": (0.0, 0.001),  # optimizer weight decay
            "warmup_epochs": (0.0, 5.0),  # warmup epochs (fractions ok)
            "warmup_momentum": (0.0, 0.95),  # warmup initial momentum
            "box": (1.0, 20.0),  # box loss gain
            "cls": (0.2, 4.0),  # cls loss gain (scale with pixels)
            "dfl": (0.4, 6.0),  # dfl loss gain
            "hsv_h": (0.0, 0.1),  # image HSV-Hue augmentation (fraction)
            "hsv_s": (0.0, 0.9),  # image HSV-Saturation augmentation (fraction)
            "hsv_v": (0.0, 0.9),  # image HSV-Value augmentation (fraction)
            "degrees": (0.0, 45.0),  # image rotation (+/- deg)
            "translate": (0.0, 0.9),  # image translation (+/- fraction)
            "scale": (0.0, 0.95),  # image scale (+/- gain)
            "shear": (0.0, 10.0),  # image shear (+/- deg)
            "perspective": (0.0, 0.001),  # image perspective (+/- fraction)
            "flipud": (0.0, 1.0),  # image flip up-down (probability)
            "fliplr": (0.0, 1.0),  # image flip left-right (probability)
            "bgr": (0.0, 1.0),  # image channel BGR (probability)
            "mosaic": (0.0, 1.0),  # image mosaic (probability)
            "mixup": (0.0, 1.0),  # image mixup (probability)
            "copy_paste": (0.0, 1.0),  # segment copy-paste (probability)
        }
        self.args = get_cfg(overrides=args)
        self.args.exist_ok = self.args.resume  # resume with the same tune_dir
        self.tune_dir = get_save_dir(self.args, name=self.args.name or "tune")
        self.args.name, self.args.exist_ok, self.args.resume = (None, False, False)  # reset to not affect training
        self.tune_csv = self.tune_dir / "tune_results.csv"
        self.callbacks = _callbacks or callbacks.get_default_callbacks()
        self.prefix = colorstr("Tuner: ")
        callbacks.add_integration_callbacks(self)
        LOGGER.info(
            f"{self.prefix}Initialized Tuner instance with 'tune_dir={self.tune_dir}'\n"
            f"{self.prefix}💡 Learn about tuning at https://docs.ultralytics.com/guides/hyperparameter-tuning"
        )

    def _mutate(self, parent="single", n=5, mutation=0.8, sigma=0.2):
        """
        Mutate hyperparameters based on bounds and scaling factors specified in `self.space`.

        Args:
            parent (str): Parent selection method: 'single' or 'weighted'.
            n (int): Number of parents to consider.
            mutation (float): Probability of a parameter mutation in any given iteration.
            sigma (float): Standard deviation for Gaussian random number generator.

        Returns:
            (dict): A dictionary containing mutated hyperparameters.
           ,   ndmin	delimiterskiprowsNr   gư>r<   )weightsweightedc                 S   s$   g | ]}t |d kr|d ndqS )   r?   r   )len).0vr9   r9   r:   
<listcomp>   s   $ z!Tuner._mutate.<locals>.<listcomp>r   g      @c                    s*   i | ]\}}|t |d    |  qS rA   floatrJ   ik)rK   xr9   r:   
<dictcomp>   s   * z!Tuner._mutate.<locals>.<dictcomp>c                    s   i | ]	}|t  j|qS r9   )getattrr.   rJ   rR   )r7   r9   r:   rT      s    r=   )r2   existsnploadtxtminrI   argsortrandomchoicesrangereshapesumseedinttimearrayr   valuesonesallrandnclip	enumeratekeysitemsmaxround)r7   parentnmutationsigmafitnesswrgnghyprR   r9   )r7   rK   rS   r:   _mutatel   s2   
(

4 zTuner._mutate

    def __call__(self, model=None, iterations=10, cleanup=True):
        """
        Execute the hyperparameter evolution process when the Tuner instance is called.

        This method iterates through the number of iterations, performing the following steps in each iteration:

        1. Load the existing hyperparameters or initialize new ones.
        2. Mutate the hyperparameters using the `_mutate` method.
        3. Train a YOLO model with the mutated hyperparameters.
        4. Log the fitness score and mutated hyperparameters to a CSV file.

        Args:
            model (Model): A pre-initialized YOLO model to be used for training.
            iterations (int): The number of generations to run the evolution for.
            cleanup (bool): Whether to delete iteration weights to reduce storage space used during tuning.

        Note:
            The method utilizes the `self.tune_csv` Path object to read and log hyperparameters and fitness scores.
            Ensure this path is set correctly in the Tuner instance.
        )NNrF   T)parentsr0   r   r?   r@   rA   rB   zResuming tuning run z from iteration z...zStarting iteration /z with hyperparameters: sysz-mzultralytics.cfg.__init__trainc                 s   s"    | ]\}}| d | V  qdS )=Nr9   rJ   rR   rK   r9   r9   r:   	<genexpr>   s     z!Tuner.__call__.<locals>.<genexpr>)checkzbest.ptzlast.pttrain_metricsztraining failedz5training failure for hyperparameter tuning iteration 
Nrs   r   r=   c                    s   g | ]} | qS r9   r9   rV   )mutated_hypr9   r:   rL      s    z"Tuner.__call__.<locals>.<listcomp> azutf-8)encodingc                 S   s   i | ]
\}}|t |d qS )r=   )rn   r   r9   r9   r:   rT      s    z"Tuner.__call__.<locals>.<dictcomp>z*.pt)ignore_errorsu    iterations complete ✅ (z.2fzs)
zResults saved to boldzBest fitness=z observed at iteration zBest fitness metrics are zBest fitness model is z0Best fitness hyperparameters are printed below.
c                    s&   i | ]\}}|t  |d  f qS rM   rN   rP   )best_idxrS   r9   r:   rT      s   & zbest_hyperparameters.yamlz# )dataheader)1rc   r1   mkdirr2   rW   rX   rY   shaper   r6   r4   r^   ry   varsr.   r   r   
__import__
executablerl   
subprocessrun
returncodetorchload	Exceptionerrorgetrn   r   rk   joinlistopenwritemapstrargmaxglobshutilcopy2rmtreer   r   rj   r
   r   replacer	   )r7   model
iterationscleanupt0best_save_dirbest_metricsstartrQ   metrics
train_argssave_dirweights_dirlaunchcmdreturn_code	ckpt_fileers   log_rowheadersfbest_is_currentckptr   r   r9   )r   r   rS   r:   __call__   s   

$&&$, 
&
zTuner.__call__)r<   r=   r>   r   )Nrz   T)__name__
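

# Minimal usage sketch (illustration only; the supported entry point is model.tune(), which builds a Tuner
# internally). Assumes network access to fetch "yolo11n.pt" and the COCO8 dataset on first use; the guard
# ensures importing this module never starts a tuning run.
if __name__ == "__main__":
    from ultralytics import YOLO

    model = YOLO("yolo11n.pt")
    # Each iteration launches a short training run with mutated hyperparameters and appends its fitness
    # and hyperparameter values to tune_results.csv in the tuning directory.
    model.tune(data="coco8.yaml", epochs=10, iterations=300, optimizer="AdamW", plots=False, save=False, val=False)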