import os
import shutil
import socket
import sys
import tempfile

from . import USER_CONFIG_DIR
from .torch_utils import TORCH_1_9


def find_free_network_port() -> int:
    """
    Find a free port on localhost.

    It is useful in single-node training when we don't want to connect to a real main node but have to set the
    `MASTER_PORT` environment variable.

    Returns:
        (int): The available network port number.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("127.0.0.1", 0))
        return s.getsockname()[1]  # port number assigned by the OS
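
# Usage sketch (illustrative, not executed here): in single-node DDP the free port is typically
# exported as MASTER_PORT before the worker processes are spawned, e.g.
#   os.environ["MASTER_PORT"] = str(find_free_network_port())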


def generate_ddp_file(trainer):
    """
    Generate a DDP (Distributed Data Parallel) file for multi-GPU training.

    This function creates a temporary Python file that enables distributed training across multiple GPUs.
    The file contains the necessary configuration to initialize the trainer in a distributed environment.

    Args:
        trainer (object): The trainer object containing training configuration and arguments.
                          Must have an `args` attribute and be a class instance.

    Returns:
        (str): Path to the generated temporary DDP file.

    Notes:
        The generated file is saved in the USER_CONFIG_DIR/DDP directory and includes:
        - Trainer class import
        - Configuration overrides from the trainer arguments
        - Model path configuration
        - Training initialization code
    """
    module, name = f"{trainer.__class__.__module__}.{trainer.__class__.__name__}".rsplit(".", 1)

    content = f"""
# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
overrides = {vars(trainer.args)}

if __name__ == "__main__":
    from {module} import {name}
    from ultralytics.utils import DEFAULT_CFG_DICT

    cfg = DEFAULT_CFG_DICT.copy()
    cfg.update(save_dir='')   # handle the extra key 'save_dir'
    trainer = {name}(cfg=cfg, overrides=overrides)
    trainer.args.model = "{getattr(trainer.hub_session, 'model_url', trainer.args.model)}"
    results = trainer.train()
"""
    (USER_CONFIG_DIR / "DDP").mkdir(exist_ok=True)
    with tempfile.NamedTemporaryFile(
        prefix="_temp_", suffix=f"{id(trainer)}.py", mode="w+", encoding="utf-8", dir=USER_CONFIG_DIR / "DDP", delete=False
    ) as file:
        file.write(content)
    return file.name

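# Usage sketch (illustrative; `trainer` stands for any trainer instance exposing an `args`
# attribute, e.g. ultralytics.models.yolo.detect.DetectionTrainer):
#   ddp_file = generate_ddp_file(trainer)  # temp training script written under USER_CONFIG_DIR / "DDP"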


def generate_ddp_command(world_size, trainer):
    """
    Generate command for distributed training.

    Args:
        world_size (int): Number of processes to spawn for distributed training.
        trainer (object): The trainer object containing configuration for distributed training.

    Returns:
        cmd (List[str]): The command to execute for distributed training.
        file (str): Path to the temporary file created for DDP training.
    r   Nztorch.distributed.runztorch.distributed.launchz-mz--nproc_per_nodez--master_port)
__main__resumeshutilrmtreesave_dirr.   r   r   sys
executable)
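
# Usage sketch (illustrative): the returned command is normally handed to a subprocess launcher, e.g.
#   cmd, ddp_file = generate_ddp_command(world_size=2, trainer=trainer)
#   # cmd ~ [sys.executable, "-m", "torch.distributed.run", "--nproc_per_node", "2",
#   #        "--master_port", "<free port>", ddp_file]
#   subprocess.run(cmd, check=True)  # needs `import subprocess`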


def ddp_cleanup(trainer, file):
    """
    Delete temporary file if created during distributed data parallel (DDP) training.

    This function checks if the provided file contains the trainer's ID in its name, indicating it was created
    as a temporary file for DDP training, and deletes it if so.

    Args:
        trainer (object): The trainer object used for distributed training.
        file (str): Path to the file that might need to be deleted.

    Examples:
        >>> trainer = YOLOTrainer()
        >>> file = "/tmp/ddp_temp_123456789.py"
        >>> ddp_cleanup(trainer, file)
    r   N)r'   osremove)r*   r-   r   r   r   ddp_cleanupe   s   r=   )r;   r1   r   r4   r%    r   torch_utilsr   intr   r.   r:   r=   r   r   r   r   <module>   s   2