o
    Vhe                     @   s  d Z ddlZddlZddlZddlZddlZddlZddlZddlm	Z	 ddl
mZ ddlmZ ddlmZ ddlZddlZddlmZ ddlmZmZ dd	lmZ dd
lmZ ddlmZmZ ddlmZm Z  ddl!m"Z"m#Z#m$Z$m%Z% ddl&m'Z'm(Z(m)Z)m*Z* ddl+m,Z,m-Z-m.Z.m/Z/m0Z0m1Z1m2Z2m3Z3m4Z4m5Z5m6Z6m7Z7m8Z8m9Z9 ddl:m;Z;m<Z<m=Z=m>Z>m?Z?m@Z@ ddlAmBZBmCZCmDZD ddlEmFZFmGZG ddlHmIZImJZJ ddlKmLZLmMZM ddlNmOZOmPZPmQZQ dd ZRdd ZSdd ZTdd ZUe	dd ZVG d d! d!ZWG d"d# d#ejXjYZZG d$d% d%ejXjYZ[dS )&a
  
Export a YOLO PyTorch model to other formats. TensorFlow exports authored by https://github.com/zldrobit.

Format                  | `format=argument`         | Model
---                     | ---                       | ---
PyTorch                 | -                         | yolo11n.pt
TorchScript             | `torchscript`             | yolo11n.torchscript
ONNX                    | `onnx`                    | yolo11n.onnx
OpenVINO                | `openvino`                | yolo11n_openvino_model/
TensorRT                | `engine`                  | yolo11n.engine
CoreML                  | `coreml`                  | yolo11n.mlpackage
TensorFlow SavedModel   | `saved_model`             | yolo11n_saved_model/
TensorFlow GraphDef     | `pb`                      | yolo11n.pb
TensorFlow Lite         | `tflite`                  | yolo11n.tflite
TensorFlow Edge TPU     | `edgetpu`                 | yolo11n_edgetpu.tflite
TensorFlow.js           | `tfjs`                    | yolo11n_web_model/
PaddlePaddle            | `paddle`                  | yolo11n_paddle_model/
MNN                     | `mnn`                     | yolo11n.mnn
NCNN                    | `ncnn`                    | yolo11n_ncnn_model/
IMX                     | `imx`                     | yolo11n_imx_model/
RKNN                    | `rknn`                    | yolo11n_rknn_model/

Requirements:
    $ pip install "ultralytics[export]"

Python:
    from ultralytics import YOLO
    model = YOLO('yolo11n.pt')
    results = model.export(format='onnx')

CLI:
    $ yolo mode=export model=yolo11n.pt format=onnx

Inference:
    $ yolo predict model=yolo11n.pt                 # PyTorch
                         yolo11n.torchscript        # TorchScript
                         yolo11n.onnx               # ONNX Runtime or OpenCV DNN with dnn=True
                         yolo11n_openvino_model     # OpenVINO
                         yolo11n.engine             # TensorRT
                         yolo11n.mlpackage          # CoreML (macOS-only)
                         yolo11n_saved_model        # TensorFlow SavedModel
                         yolo11n.pb                 # TensorFlow GraphDef
                         yolo11n.tflite             # TensorFlow Lite
                         yolo11n_edgetpu.tflite     # TensorFlow Edge TPU
                         yolo11n_paddle_model       # PaddlePaddle
                         yolo11n.mnn                # MNN
                         yolo11n_ncnn_model         # NCNN
                         yolo11n_imx_model          # IMX

TensorFlow.js:
    $ cd .. && git clone https://github.com/zldrobit/tfjs-yolov5-example.git && cd tfjs-yolov5-example
    $ npm install
    $ ln -s ../../yolo11n_web_model public/yolo11n_web_model
    $ npm start
    N)contextmanager)deepcopy)datetime)Path)__version__)	TASK2DATAget_cfg)build_dataloader)YOLODataset)check_cls_datasetcheck_det_dataset)check_class_namesdefault_class_names)C2fClassifyDetectRTDETRDecoder)ClassificationModelDetectionModelSegmentationModel
WorldModel)ARM64DEFAULT_CFGIS_COLABLINUXLOGGERMACOSMACOS_VERSION
RKNN_CHIPSROOTWINDOWS	callbackscolorstrget_default_args	yaml_save)IS_PYTHON_MINIMUM_3_12check_imgszcheck_is_path_safecheck_requirementscheck_versionis_sudo_available)attempt_download_assetget_github_assetssafe_download)export_engineexport_onnx)	file_sizespaces_in_path)Profilenms_rotated)
TORCH_1_13get_latest_opsetselect_devicec                  C   s.  dddddg gdddddg dgd	d
dddg dgdddddg dgdddddg dgdddddg dgdddddg dgddd ddd!ggd"d#d$ddg d%gd&d'd(ddg gd)d*d+ddg dgd,d-d.ddd!ggd/d0d1ddg d2gd3d4d5ddd!d6ggd7d8d9ddd:d;ggd<d=d>ddd!d?ggg} t tg d@t|  S )Az7Return a dictionary of Ultralytics YOLO export formats.PyTorch-z.ptTTorchScripttorchscript.torchscript)batchoptimizehalfnmsONNXonnx.onnx)r<   dynamicr>   opsetsimplifyr?   OpenVINOopenvino_openvino_modelF)r<   rC   r>   int8r?   fractionTensorRTengine.engine)r<   rC   r>   rI   rE   r?   rJ   CoreMLcoreml
.mlpackage)r<   r>   rI   r?   zTensorFlow SavedModelsaved_model_saved_model)r<   rI   kerasr?   zTensorFlow GraphDefpb.pbr<   zTensorFlow Litetflite.tflite)r<   r>   rI   r?   rJ   zTensorFlow Edge TPUedgetpu_edgetpu.tflitezTensorFlow.jstfjs
_web_modelPaddlePaddlepaddle_paddle_modelMNNmnn.mnn)r<   r>   rI   NCNNncnn_ncnn_modelr>   IMXimx
_imx_modelrI   rJ   RKNNrknn_rknn_modelname)FormatArgumentSuffixCPUGPU	Arguments)dictzipx rv   O/var/www/vscode/kcb/lib/python3.10/site-packages/ultralytics/engine/exporter.pyexport_formatsp   s<   	 rx   c                 C   s~   g d}|dusJ d|  ddddd}t t|}|D ]}t||dt||dk}|r<||v s<J d| d|  d	qdS )
ao  
    Validate arguments based on the export format.

    Args:
        format (str): The export format.
        passed_args (Namespace): The arguments used during export.
        valid_args (list): List of valid arguments for the format.

    Raises:
        AssertionError: If an unsupported argument is used, or if the format lacks supported argument listings.
    )r>   rI   rC   rS   r?   r<   rJ   Nu"   ERROR ❌️ valid arguments for 'z' not listed.   )r<   datadeviceu   ERROR ❌️ argument 'z' is not supported for format='')r   r   getattr)formatpassed_args
valid_argsexport_argscustomdefault_argsargnot_defaultrv   rv   rw   validate_args   s   
r   c                 C   sP   g g }}| j D ]}||j ||j qtdd tt|t| D S )z3Return TensorFlow GraphDef model output node names.c                 s   s$    | ]}| d s| dV  qdS )NoOpz:0N)
startswith.0ru   rv   rv   rw   	<genexpr>   s   " zgd_outputs.<locals>.<genexpr>)nodeappendrk   extendinputsortedlistset)gd	name_list
input_listr   rv   rv   rw   
gd_outputs   s
   

"r   c                    s   t   fdd}|S )z(YOLO export decorator, i.e. @try_export.c               
      s    d }d}z7t  }| i |\}}W d   n1 sw   Y  t| d|jdd| dt|dd ||fW S  tyZ } zt| d	|jdd
|  |d}~ww )zExport a model.prefix        Nu    export success ✅ .1fzs, saved as 'z' ( MB)z export failure zs: )r2   r   infotr0   	Exceptionerror)argskwargsr   dtfmodele
inner_args
inner_funcrv   rw   
outer_func   s   ,
ztry_export.<locals>.outer_func)r#   )r   r   rv   r   rw   
try_export   s   r   c                 #   sP    | j r#| jr#| jdkr#tj dd fdd
}|t_dV   t_dS dV  dS )z|
    Workaround for ONNX torch.arange incompatibility with FP16.

    https://github.com/pytorch/pytorch/issues/148041.
    rA   N)dtypec                    s    |i | | S )zPReturn a 1-D tensor of size with values from the interval and common difference.)to)r   r   r   funcrv   rw   arange      zarange_patch.<locals>.arange)rC   r>   r~   torchr   )r   r   rv   r   rw   arange_patch   s   

r   c                   @   s  e Zd ZdZeddfddZdCdefddZdDd	d
Ze	e
dfddZe	e
dfddZe	e
dfddZe	e
dfddZe	e
dfddZe	e
dfddZe	e
dfddZe	de
d fd!d"Ze	e
d#fd$d%Ze	e
d&fd'd(Ze	e
d)fd*d+Ze	de
d,fd-d.Ze	e
d/fd0d1Ze	e
d2fd3d4Ze	e
d5fd6d7ZdEd9d:Zde
d;fd<d=Zd>efd?d@Zd>efdAdBZdS )FExporterz
    A class for exporting a model.

    Attributes:
        args (SimpleNamespace): Configuration for the exporter.
        callbacks (list, optional): List of callback functions.
    Nc                 C   sB   t ||| _| jj dv rdtjd< |pt | _t|  dS )a
  
        Initialize the Exporter class.

        Args:
            cfg (str, optional): Path to a configuration file.
            overrides (dict, optional): Configuration overrides.
            _callbacks (dict, optional): Dictionary of callback functions.
        >   rO   mlmodelpython&PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATIONN)	r   r   r~   lowerosenvironr!   get_default_callbacksadd_integration_callbacks)selfcfg	overrides
_callbacksrv   rv   rw   __init__   s
   	
zExporter.__init__returnc           )         s6  |  d t }| jj   dv rd  dv rd t }t|d dd } |vrWd	dl}|j |dd
d}|sEt	d  d| t
d  d|d	  d |d	   fdd|D }t|dkrpt	d  d| |\}}	}
}}}}}}}}}}}}t|||||f}d} dkr| jjdu rt
d d| j_ dkrdt| jjv r| jjdd }d| j_|dv sJ d| jj d|r| jjdu rtj rt
d d| j_t| jjdu rdn| jj| _|d |dd  t | j |r| jjst
d d| j_|jdkrt	d t|d!s"t |_t|j|_| jjr;| jjr;t
d" d#| j_| jjrS|	rS| jjdkrSt
d$ d#| j_t| jj |j!d%d&| _ | jjrj|rjd| j_"| jj#r|rvJ d'| jjdksJ d(|r| jj$st
d) d*| j_$| jj$ | j_$| jj$t%v sJ d+| jj$ d,t% d| jjr|rt&|d-d#rJ d.| jj'rt(|t)rJ d/|rt*rt+rJ d0t&|d-d#rt
d1 d#| j_'| jj,pd2| j_,|rt+rt*rt-d3| jj.dkrt
d4 d| j_.t(|t/r!t
d5 d|_0| jjrD| jj1sDt2j1p6t3t&|d6d | j_1t
d7| jj1 d8 |rQt*rQt+rQt-d9tj4| jj.|j56d:d;g| j R  7| j}t8t&|d<dp|t&|d=dp||j56d=d>}|j9d?v rt8|j$}t:|7| j}|; D ]}d#|_<q|=  |>  |? }|rd	d@l@mA} ||}|B D ]it(tCrd_Dt(tEtFfr| jj"_"d_D| jj_| jjG_G| jj'o| _Hnt(tIr|sjJ_Kt(tEr!|r!d	dAlLmM} dBdC |tjNfdDd| j D ddEj!dFD \_O_Pqd}tQd%D ]}| jj'r;|s;tR|| j|n||}q)| jjrY|	rY| jjdkrY| | }}tSjTdGtjUjVdH tSjTdGtWdH tSjTdGtXdH || _Y|| _Z|| _[t(|tj\rt|j]ntdIdC |D | _^t8| jZj56d=| j[j_`dJdK| _at|dLrt(|jtbr|jdM nd>} dN| ja dO| rdP|  nd> }!|!dQtcd e tfdRdStgth|j!|j| jj.| j |jfdTdU| jD |j56d:d;dV| _i|dur|| jid< |jdWkr
|jZd jj| jidX< t
kdYtldZ d[| d\t|j] d]| j^ d^tm|d_d` d>gtn| }"|s5|r=| o \|"d	< }|rJ| jp|da\|"d< }|	rU| q \|"d%< }|
r`| r \|"d;< }|rk| s \|"db< }|r| j j|O  _| t \|"dc< }#|s|r| ju|#dd\|"de< }|r| v \|"df< }|r| jwt8|"dc | j[j_ dg dh\|"di< }|r| x \|"dj< }|r| y \|"dk< }|r| z \|"dl< }|r| { \|"dm< }|r| | \|"dn< }|r| } \|"do< }dpd |"D }"t|"rtt8|"d }"| j d	 | j d k}$|$rd>ndq| j  drth| j  ds}%|$r+| j d	 nt| j dd `dtd>}&|jdukrH dvkrHdw|  nd>}'| jjrQdxn| jjrXdynd>}(t
kdzt | d_d{tld||j~  d}|j d~|" d|& dt|( dt|' d|j d~|" d|& d|  dt|( dt|% d |  d |"S )z;Return list of exported files/dirs after running callbacks.on_export_start>   trttensorrtrL   >   iosapplerO   r   	mlpackage	mlprogramrO   rm   ry   Nr   g333333?)ncutoffzInvalid export format='z'. Valid formats are z', updating to format='r|   c                    s   g | ]}| kqS rv   rv   r   )fmtrv   rw   
<listcomp>      z%Exporter.__call__.<locals>.<listcomp>z>TensorRT requires GPU export, automatically assigning device=00dla:>   r   1z5Expected self.args.device='dla:0' or 'dla:1, but got .zTExporting on CPU while CUDA is available, setting device=0 for faster export on GPU.cpurq   Tz1IMX export requires int8=True, setting int8=True.detectz/IMX export only supported for detection models.nameszChalf=True and int8=True are mutually exclusive, setting half=False.Fz<half=True only compatible with GPU export, i.e. use device=0   )stridemin_dimzHoptimize=True not compatible with format='ncnn', i.e. use optimize=FalsezEoptimize=True not compatible with cuda devices, i.e. use device='cpu'zcRockchip RKNN export requires a missing 'name' arg for processor type. Using default name='rk3588'.rk3588zInvalid processor name 'z,' for Rockchip RKNN export. Valid names are end2endz4TFLite INT8 export not supported for end2end models.z2'nms=True' is not valid for classification models.z1TFLite export with NMS unsupported on ARM64 LinuxzD'nms=True' is not available for end2end models. Forcing 'nms=False'.g      ?z_Edge TPU export only supported on non-aarch64 Linux. See https://coral.ai/docs/edgetpu/compilerz7Edge TPU export requires batch size 1, setting batch=1.a   YOLOWorld (original version) export is not supported to any format. YOLOWorldv2 models (i.e. 'yolov8s-worldv2.pt') only support export to (torchscript, onnx, openvino, engine, coreml) formats. See https://docs.ultralytics.com/models/yolo-world for details.taskzOINT8 export requires a missing 'data' arg for calibration. Using default 'data='.z8TF.js exports are not currently supported on ARM64 Linuxchannels   pt_path	yaml_file >   .yml.yaml)FXModel)make_anchorsc                 s   s    | ]	}| d dV  qdS )r   ry   N)	transposer   rv   rv   rw   r     s
    

z$Exporter.__call__.<locals>.<genexpr>c                    s   g | ]
}| j d  qS )r   )r   	unsqueeze)r   s)mrv   rw   r     s    dimg      ?ignore)categoryc                 s   s*    | ]}t t|tjr|jng V  qd S N)tuple
isinstancer   Tensorshaper   rv   rv   rw   r     s   ( yoloYOLOr   rz   zUltralytics z model ztrained on Ultralyticsz2AGPL-3.0 License (https://ultralytics.com/license)zhttps://docs.ultralytics.comc                    s   i | ]\}}| v r||qS rv   rv   r   kv)fmt_keysrv   rw   
<dictcomp>      z%Exporter.__call__.<locals>.<dictcomp>)descriptionauthordateversionlicensedocsr   r   r<   imgszr   r   r   pose	kpt_shape
zPyTorch:z starting from 'z' with input shape z BCHW and output shape(s) z (r   r   )r         keras_model      z_full_integer_quant.tflite)tflite_model   	   
               c                 S   s   g | ]}|rt |qS rv   strr   rv   rv   rw   r         u>   WARNING ⚠️ non-PyTorch val requires square images, 'imgsz=z#' will not work. Use export 'imgsz=z' if val is required. segmentrT   zdata=rI   r>   z
Export complete (zs)
Results saved to boldz$
Predict:         yolo predict task=z model=z imgsz=z 
Validate:        yolo val task=z data=z$
Visualize:       https://netron.appon_export_end)run_callbackstimer   r~   r   rx   r   difflibget_close_matches
ValueErrorr   warningsumanyr{   r  splitr   cudais_availabler6   indexr   rI   r   hasattrr   r   r   r>   typer&   r  r   rC   r=   rk   r   r}   r?   r   r   r   r   confSystemErrorr<   r   
clip_modelrz   r   r   zerosyamlgetr   r   suffixr   
parametersrequires_gradevalfloatfuseultralytics.utils.torch_utilsr   modulesr   exportr   r   max_detxyxyr   forward_splitforwardultralytics.utils.talr   catanchorsstridesrangeNMSModelwarningsfilterwarningsjitTracerWarningUserWarningDeprecationWarningimr   filer   r   output_shapestemreplacepretty_namerr   r   now	isoformatr   intmaxmetadatar  r   r"   r0   lenexport_torchscriptr.   r/   export_openvinoexport_coremlexport_saved_model	export_pbexport_tfliteexport_edgetpuexport_tfjsexport_paddle
export_mnnexport_ncnn
export_imxexport_rknnparentresolve))r   r   r   	fmts_dictfmtsr  matchesflagsrD  rA   xmlrL   rO   rQ   rT   rV   rX   rZ   r]   r`   rc   rf   ri   is_tf_formatr   rH  rI  pr   r   y_rz   r   r   r	  squarer   r  predict_dataqrv   )r   r   r   rw   __call__   s  
 










,(




",
"("


*

*$ 
zExporter.__call__r   c              	   C   s   t | d| jj d | jjdkrtnt| jj}| jj| jj	dkr&dnd }t
|| jjp0d || jj| jj| jd d	|d
}t|}|| jjk rXtd| d| jj d|dk rgt | d| d t||ddS )z=Build and return a dataloader for calibration of INT8 models.z/ collecting INT8 calibration images from 'data=r|   classifyrL   r   ry   valr   F)rz   rJ   r   r  augment
batch_sizezThe calibration dataset (zE images) must have at least as many images as the batch size ('batch=z').,  z5 >300 images recommended for INT8 calibration, found z images.)r<   workers)r   r   r   rz   r   r   r   r   r<   r~   r
   r#  rJ   r  rS  r  r   r	   )r   r   rz   r<   datasetr   rv   rv   rw   get_int8_calibration_dataloader  s,   	z(Exporter.get_int8_calibration_dataloaderzTorchScript:c                 C   s   t d| dtj d | jd}tjj| jj	r!t
| j| jn| j| jdd}dt| ji}| jjrRt | d d	d
lm} ||jt||d |dfS |jt||d |dfS )zYOLO TorchScript model export.r  z starting export with torch ...r;   Fstrictz
config.txtz optimizing for mobile...r   )optimize_for_mobile)_extra_filesN)r   r   r   r   rI  with_suffixrD  tracer   r?   rA  r   rH  jsondumpsrR  r=   torch.utils.mobile_optimizerr{  _save_for_lite_interpreterr  save)r   r   r   tsextra_filesr{  rv   rv   rw   rT    s   ,zExporter.export_torchscriptzONNX:c              
   C   s  dg}| j jr|ddtj rdnd g7 }t| ddl}| j jp$t }t	
d| d	|j d
| d t| jd}t| jtrGddgndg}| j j}|rdddddi}t| jtrnddd|d< dddd|d< nt| jtr{ddd|d< | j jr|d d | j jr| jjdkr|| j _ztjddd | W n	 ty   Y nw td t| j # t| j jrt| j| j n| j| j||dg||pdd W d   n1 sw   Y  ||}| j jrzddl}	t	
| d|	j d |	|}W n t y }
 zt	!| d |
  W Y d}
~
nd}
~
ww | j"# D ]\}}|j$% }|t||_&|_'q"|(|| ||fS )!zYOLO ONNX export.onnx>=1.12.0onnxslimonnxruntimez-gpur   r   Nr  z starting export with onnx z opset rx  rB   output0output1imagesr<   heightwidth)r   r   r   r>  )r   r   mask_height
mask_widthr   obbzaten::lift_freshc                 S   s   |S r   rv   )gru   rv   rv   rw   <lambda>A      z&Exporter.export_onnx.<locals>.<lambda>zonnxslim>=0.1.46)rD   input_namesoutput_namesrC   z slimming with onnxslim z simplifier failure: ))r   rE   r   r$  r%  r(   rA   rD   r5   r   r   r   r  rI  r}  r   r   r   rC   r   r?   popr   register_custom_op_symbolicRuntimeErrorr   r/   rA  rH  loadr  slimr   r   rR  itemsmetadata_propsaddkeyvaluer  )r   r   requirementsrA   opset_versionr   r  rC   
model_onnxr  r   r   r   metarv   rv   rw   r/   &  sj    

 
zExporter.export_onnxz	OpenVINO:c                    s  t rd}ttddd|d td ddl td	| d
 j d ts-J dt	j d j
jjr:tjjnjjjrBdnjjgjd} fdd}jjrtjjjdtj }tt|jdj }td ddl}dtjfdd}d}	tjjd t rd!t"j# d d $ddd }
|j%d|
 dd|
 dd|
 dd|
 dd|
 dgd gd!}	|j&||'(|||j)j*|	d"}||| |dfS tjjjd#tj }tt|jdj }||| |dfS )$zYOLO OpenVINO export.zVOpenVINO error in macOS>=15.4 https://github.com/openvinotoolkit/openvino/issues/30023z<15.4zmacOS T)rk   hardmsgzopenvino>=2024.0.0r   Nr  z starting export with openvino rx  z2OpenVINO export requires torch>=1.13.0 but torch==z is installed)r   example_inputc                    s   |  dddg |  dddg |  dddg |  dgdd	g |  jjdd
g |  dd jj D ddg jjdkrJ|  dddg  j| |jjd t	t
|jd j dS )z/Set RT info, serialize, and save metadata YAML.r   
model_info
model_typeTreverse_input_channelsr   	pad_value     o@scale_valuesiou_thresholdc                 S   s   g | ]}| d dqS )r  rk  )rL  )r   r   rv   rv   rw   r     r  z?Exporter.export_openvino.<locals>.serialize.<locals>.<listcomp>labelsrp  fit_to_window_letterboxresize_type)compress_to_fp16metadata.yamlN)set_rt_infor   iour   r   valuesr   
save_modelr>   r$   r   ra  rR  )ov_modelrI  ovr   rv   rw   	serializex  s   "z+Exporter.export_openvino.<locals>.serialize_int8_openvino_modelz.xmlznncf>=2.14.0r   c                 S   sX   t | tr	| d n| } | jtjksJ d|  tjd }|j	dkr*t
|dS |S )z Quantization transform function.imgz<Input image must be uint8 for the quantization preprocessingr  r   r   )r   rr   r   r   uint8numpyastypenpfloat32ndimexpand_dims)	data_itemrH  rv   rv   rw   transform_fn  s   z.Exporter.export_openvino.<locals>.transform_fnr   r   r   z.*z/.*/Addz/.*/Sub*z/.*/Mul*z/.*/Div*z\.dfl.*Sigmoid)patternstypes)r   calibration_datasetpresetignored_scoperH   )+r   r)   r   r(   rG   r   r   r   r4   r   convert_modelr   r?   rA  r   rC   rH  r   rI   r  rI  rL  r/  r   sepr   r}  rk   nncfr  ndarrayr   r   joinr   named_modulesr#  IgnoredScopequantizeDatasetrw  QuantizationPresetMIXED)r   r   r  r  r  fqfq_ovr  r  r  head_module_namequantized_ov_modelr   f_ovrv   r  rw   rU  g  sV   *






zExporter.export_openvinozPaddlePaddle:c                 C   s   t tj rdnddf ddl}ddlm} td| d|j	 d	 t
| j| jjd
tj }|| j|d| jgd tt|d | j |dfS )zYOLO Paddle export.zpaddlepaddle-gpuzpaddlepaddle>=3.0.0x2paddler   N)pytorch2paddler  z starting export with X2Paddle rx  r^   r~  )modulesave_dirjit_typeinput_examplesr  )r(   r   r$  r%  r  x2paddle.convertr  r   r   r   r  rI  rL  r/  r   r  r   rH  r$   r   rR  )r   r   r  r  r   rv   rv   rw   r\    s   zExporter.export_paddlezMNN:c           	      C   s   |   \}}td ddl}ddlm} td| d|  d t|	 s/J d| t
| jd	}d
ddd|d|dt| jg	}| jjrO|d | jjrX|d || t| jjd }|	 rm|  |dfS )z;YOLOv8 MNN export using MNN https://github.com/alibaba/MNN.z
MNN>=2.9.6r   N)
mnnconvertr  z starting export with MNN rx  failed to export ONNX file: ra   r   z-fr@   z--modelFilez
--MNNModelz	--bizCode)z--weightQuantBits8z--fp16z.__convert_external_data.bin)r/   r(   r_   	MNN.toolsr  r   r   r   r   existsr  rI  r}  r  r  rR  r   rI   r   r>   r   convertra  unlink)	r   r   f_onnxrk  r_   r  r   r   convert_scratchrv   rv   rw   r]    s"   


zExporter.export_mnnzNCNN:c              
      s  t d ddl}td| d|j d tt| j| jj	dt
j }| jd}ttr1d	nd
}| r:|nt| }| st| dt d trQdn	trUdntrYdnd z'tdd\}} fdd|D d }	t|	tswJ dt| d|	  W n* ty }
 zd}d| d  d}	t| d|
 d|	  W Y d}
~
nd}
~
ww td| d|	 dd }tt |rtj|| |d! |d" t| d#|d$  d%|d&  d'|d(  g}d)|d*  d+|d,  d-|d.  d/|d0  g}t|t|g||d1t| jj d2| j j! d3| jj"d4g| j# d5}|j$dd6 t| d7d8%| d9 t&j'|dd: d;d |D }d<d=d>d?g|R D ]}t|j(dd@ qUt)|dA | j* t|dfS )Bz9YOLO NCNN export using PNNX https://github.com/pnnx/pnnx.rc   r   Nr  z starting export with NCNN rx  rd   r;   zpnnx.exepnnxz PNNX not found. Attempting to download binary file from https://github.com/pnnx/pnnx/.
Note PNNX Binary file must be placed in current working directory or in z3. See PNNX repo for full installation instructions.macoswindowszlinux-aarch64linuxz	pnnx/pnnx)repoc                    s   g | ]}  d |v r|qS ).ziprv   r   systemrv   rw   r     r   z(Exporter.export_ncnn.<locals>.<listcomp>z#Unable to retrieve PNNX repo assetsz+ successfully found latest PNNX asset file 20240410zpnnx-r8   r  z PNNX GitHub assets not found: z, using default z/https://github.com/pnnx/pnnx/releases/download//T)delete)srcdsti  z
ncnnparam=zmodel.ncnn.paramzncnnbin=zmodel.ncnn.binzncnnpy=zmodel_ncnn.pyz
pnnxparam=zmodel.pnnx.paramzpnnxbin=zmodel.pnnx.binzpnnxpy=zmodel_pnnx.pyz	pnnxonnx=zmodel.pnnx.onnxzfp16=zdevice=zinputshape="r   "exist_ok
 running 'r  r|   checkc                 S   s   g | ]	}| d d qS )=r   )r#  r   rv   rv   rw   r         z	debug.binzdebug.paramz
debug2.binzdebug2.param
missing_okr  )+r(   rc   r   r   r   r   r  rI  rL  r/  r   r  r}  r    is_filer   r   r   r   r,   r   r   r-   r'   cwdshutilmovechmodrmtreerP  r   r>   r{   r(  r<   r  mkdirr  
subprocessrunr  r$   rR  )r   r   rc   r   f_tsrk   r  releaseassetsassetr   	unzip_dir	ncnn_args	pnnx_argscmd
pnnx_filesf_debugrv   r  rw   r^    sx   "&

	zExporter.export_ncnnzCoreML:c              
   C   s  | j j dk}td ddl}td| d|j d tr#J d| j j	d	ks-J d
| j
|r4dnd}| r@t| g d}d}d}| jjdkr]|t| jj }| j}n$| jjdkrr| j jrnt| j| jn| j}n| j jr~t| d | j}tjj| | jdd}	|j|	|jd| jj||dg||rdndd}
| j j rdn| j j!rdnd\}}|dk rd|v rtd |r|j"j#j$%|
||}
n"|dkrddl&m'  m(} |j)d|d d!}|j*|d"}|j+|
|d#}
| j jr| jjdkr|rd}n|
,t-| t-|d$ }| j.|
|d%}
| j/}|0d&|
_1|0d'|
_2|0d(|
_3|0d)|
_4|
j56d*d+ |7 D  z|
,t-| W ||
fS  t8yv } z t| d,| d- |d}|
,t-| W Y d}~||
fS d}~ww ).zYOLO CoreML export.r   zcoremltools>=8.0r   Nr  z" starting export with coremltools rx  zHCoreML export is not supported on Windows, please run on macOS or Linux.ry   zDCoreML batch sizes > 1 are not supported. Please retry at 'batch=1'.z.mlmodelrP   )r   r   r   gp?rp  r   zB 'nms=True' is only available for Detect models like 'yolo11n.pt'.Fry  image)r   scalebiasneuralnetworkr   )inputsclassifier_config
convert_to)r  kmeans)   linear)    Nr!  r  zscikit-learnr  i   )modenbitsweight_threshold)global_config)configzData/com.apple.CoreML/weightsweights_dirr   r   r   r   c                 S   s   i | ]	\}}|t |qS rv   r  r   rv   rv   rw   r   ]  r  z*Exporter.export_coreml.<locals>.<dictcomp>z& CoreML export to *.mlpackage failed (z), reverting to *.mlmodel export. Known coremltools Python 3.11 and Windows bugs https://github.com/apple/coremltools/issues/1928.)9r   r~   r   r(   coremltoolsr   r   r   r    r<   rI  r}  is_dirr  r	  r   r   ClassifierConfigr   r   r  r?   IOSDetectModelrH  r   r   rD  r~  r2  r  	ImageTyper   rI   r>   modelsneural_networkquantization_utilsquantize_weightscoremltools.optimize.coremlr=   rO   OpPalettizerConfigOptimizationConfigpalettize_weightsr  r  _pipeline_coremlrR  r  short_descriptionr   r   r   user_defined_metadataupdater  r   )r   r   r   ctr   r  r  r  r   r  ct_modelbitsr"  cto	op_configr&  r(  r   r   rv   rv   rw   rV    s|   

 
zExporter.export_coremlz	TensorRT:c                 C   s
  | j jjdksJ d|  \}}zddl}W n ty*   tr$td ddl}Y nw t|j	ddd t|j	d	d
d t
d| d|j	 d t| sUJ d| | jd}t||| jj| jj| jj| jj| j j|| jjrw| |nd| j| jj|d |dfS )z;YOLO TensorRT export https://developer.nvidia.com/tensorrt.r   z=export running on CPU but must be on GPU, i.e. use 'device=0'r   Nztensorrt>7.0.0,!=10.1.0z>=7.0.0T)r  z!=10.1.0z5https://github.com/ultralytics/ultralytics/pull/14239)r  r  z starting export with TensorRT rx  r  rM   )r   rv  rR  verboser   )rH  r{   r(  r/   r   ImportErrorr   r(   r)   r   r   r   r   r  rI  r}  r.   r   	workspacer>   rI   rC   r   rw  rR  r?  )r   r   r   r  rk  r   r   rv   rv   rw   r.   i  s:   zExporter.export_enginezTensorFlow SavedModel:c                 C   s  t j }zddl}W n ty   td ddl}Y nw tdddddd	d
d|r*dnddf
dd td| d|j d t	|jddddd ddl
}tt| j| jjd}| rbt| td}| stt| dddd d| j_|  \}}d}	| jjr|d }
| jjr|  dd | |D }t jjjt |d  | j!d "dd!d"d#}t#$t|
|% &t#j' d$|
g d%gggg d&ggggg}	t| d'|j d |j(|t|dd(| jjd)|	ddd*	}t)|d+ | j* | jjr&|
j+dd, |,d-D ]}|-|.|j/d.d/|j  q|,d0D ]}|+  q|,d1D ]}d2t|v r8|+ n| 0| q+t||fS )3z"YOLO TensorFlow SavedModel export.r   Nztensorflow>=2.0.0rS   tf_keraszsng4onnx>=1.0.1zonnx_graphsurgeon>=0.3.26zai-edge-litert>=1.2.0r  zonnx2tf>=1.26.3zonnxslim>=0.1.31zonnxruntime-gpur  zprotobuf>=5z---extra-index-url https://pypi.ngc.nvidia.com)cmdsr  ! starting export with tensorflow rx  z>=2.0.0
tensorflowTz6https://github.com/ultralytics/ultralytics/issues/5161)rk   r?  r  rR   z6calibration_image_sample_data_20x128x128x3_float32.npyr  )unzipr  z&tmp_tflite_int8_calibration_images.npyc                 S   s   g | ]}|d  qS )r  rv   )r   r<   rv   rv   rw   r     r   z/Exporter.export_saved_model.<locals>.<listcomp>)sizer   r   ry   r  r   r   r      rJ  rJ  z% starting TFLite export with onnx2tf r   z
per-tensor)	input_onnx_file_pathoutput_folder_pathnot_use_onnxsim	verbosityoutput_integer_quantized_tflite
quant_type!custom_input_op_name_np_data_pathdisable_group_convolutionenable_batchmatmul_unfoldr  r  z*_dynamic_range_quant.tflite_dynamic_range_quant_int8z%*_integer_quant_with_int16_act.tflitez*.tflitezquant_with_int16_act.tflite)1r   r$  r%  rE  r@  r(   r   r   r   r)   onnx2tfr   r  rI  rL  r/  r*  r  r	  r  r+   r   rE   r/   rI   rz   r
  rw  nn
functionalinterpolater=  r3  r  permuter  r  r  r  r  r  r$   rR  r  rglobrename	with_namerK  _add_tflite_metadata)r   r   r$  tfrV  r   onnx2tf_filer  rk  np_datatmp_filer  r	  rI  rv   rv   rw   rW    s   


 "
$zExporter.export_saved_modelzTensorFlow GraphDef:c                    s   ddl }ddlm} td| d|j d | jd}| fdd	}|	|
 jd j jd j}||}|j  |jj|jt|j|jd
d |dfS )zWYOLO TensorFlow GraphDef *.pb export https://github.com/leimao/Frozen-Graph-TensorFlow.r   N)!convert_variables_to_constants_v2r  rD  rx  rU   c                    s    | S r   rv   rt   r  rv   rw   r    s    z$Exporter.export_pb.<locals>.<lambda>F)graph_or_graph_deflogdirrk   as_text)rE  0tensorflow.python.framework.convert_to_constantsrc  r   r   r   rI  r}  functionget_concrete_function
TensorSpecr  r   r   graphas_graph_defiowrite_graphr  ra  rk   )r   r	  r   r_  rc  r   r   frozen_funcrv   r  rw   rX    s   "
zExporter.export_pbzTensorFlow Lite:c                 C   s   ddl }td| d|j d tt| j| jjd}| j	j
r,|| jj d }n| j	jr:|| jj d }n	|| jj d	 }t|dfS )
zYOLO TensorFlow Lite export.r   Nr  rD  rx  rR   z_int8.tflitez_float16.tflitez_float32.tflite)rE  r   r   r   r   r  rI  rL  r/  r   rI   rK  r>   )r   r   r_  rQ   r   rv   rv   rw   rY    s   zExporter.export_tflitez	Edge TPU:c                 C   s  t | d d}d}tsJ d| tj|tjtjddjdkrDt d| d	|  d
D ]}tjt r9|n|	ddddd q0tj|ddddj
  d }t d| d| d t|	dd}dt|j d| d}t | d| d tj|dd | | |dfS )zAYOLO Edge TPU export https://coral.ai/docs/edgetpu/models-intro/.zJ Edge TPU known bug https://github.com/ultralytics/ultralytics/issues/1185zedgetpu_compiler --versionz'https://coral.ai/docs/edgetpu/compiler/z$export only supported on Linux. See T)stdoutstderrshellr   r  z< export requires Edge TPU compiler. Attempting install from )zOcurl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -zecho "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | sudo tee /etc/apt/sources.list.d/coral-edgetpu.listzsudo apt-get updatez%sudo apt-get install edgetpu-compilerzsudo r   )rr  r  )rr  capture_outputr  r   z( starting export with Edge TPU compiler rx  rW   rY   zedgetpu_compiler --out_dir "zS" --show_operations --search_delegate --delegate_search_step 30 --timeout_sec 180 "r  r  r|   rr  N)r   r   r   r  r  DEVNULL
returncoder   r*   rL  rp  decoder#  r  r   ra  r^  )r   r  r   r  help_urlcverr   rv   rv   rw   rZ    s*   $ 	
zExporter.export_edgetpuzTensorFlow.js:c                 C   s  t d ddl}ddl}td| d|j d t| j| jj	d}t| j
d}|  }t|d	}||  W d   n1 sIw   Y  d
t|}td| d|  | jjrfdn| jjrldnd}	t|=}
t|)}d|	 d| d|
 d| d	}t| d| d tj|dd W d   n1 sw   Y  W d   n1 sw   Y  d|v rt| d| d tt|d | j |dfS )zYOLO TensorFlow.js export.tensorflowjsr   Nr  z# starting export with tensorflowjs rx  r[   rU   rb,z output node names: z--quantize_float16z--quantize_uint8r   z6tensorflowjs_converter --input_format=tf_frozen_model z --output_node_names=z "z" "r  r  r|   Trt  r  z8 your model may not work correctly with spaces in path 'r   r  )r(   rE  r{  r   r   r   r  rI  rL  r/  r}  Graphrl  openParseFromStringreadr  r   r   r>   rI   r1   r  r  r   r$   r   rR  )r   r   r_  rZ   r   f_pbr   rI  outputsquantizationfpb_f_r  rv   rv   rw   r[  +  sB    zExporter.export_tfjszRKNN:c                 C   s   t d| d td trddl}dd |_ddlm} |  \}}t	t	|j
 d	}|jd
d |dd}|jg dgg dg| jjd |j|d |jdd |dd| jj d}|||   t|d | j |dfS )zYOLO RKNN model export.r  z& starting export with rknn-toolkit2...zrknn-toolkit2r   Nc                   S   s   d S r   rv   rv   rv   rv   rw   r  V  r  z&Exporter.export_rknn.<locals>.<lambda>)rh   rj   Tr  F)r?  rH  rI  )mean_values
std_valuestarget_platform)r   )do_quantizationrB   r8   z.rknnr  )r   r   r(   r   builtinsexitrknn.apirh   r/   r   rK  r
  r&  r   rk   	load_onnxbuildrL  r`  r$   rR  )r   r   r  rh   r   rk  export_pathri   rv   rv   rw   r`  L  s"   

 zExporter.export_rknnzIMX:c              
      s  d}t sJ dt| jddrtdtd td ddl}ddl}dd	lm} dd
l	m
  td| d|j d z(tjddgdddj }td|}|rYt|dnd}|dkscJ dW n  ttjtfy   t rudgng g d }	tj|	dd Y nw | |fdd}
|ddd}|j }d| j v rg d}d }d!}ng d"}d#}d$}tt| j  |krtd%|D ]}|!|jj"j#$|gd& q|jj%|jj&d'd(|jj'dd)|d*}|jj(|d+}|r|j)j*| j|
||j)j+d,ddd-||d.d n|j,j-| j|
|||d/d }G  fd0d1d1t.j/j0}||| j1j2p'd2| j1j3| j1j4d35| j6}t7t8| j9:| j9j;d4}|j<dd5 |t7t8| j9j=:| j9j;d6 }|j>j?|||
d7 |@|}| jAB D ]\}}|jCD }|t8||_E|_Fqj|G|| tjd8d9t8|d:t8|d;d<gdd tH|d= d>d?d@}|IdAdB | jjJB D  W d   |dfS 1 sw   Y  |dfS )CzYOLO IMX export.Fzexport only supported on Linux. See https://developer.aitrios.sony-semicon.com/en/raspberrypi-ai-camera/documentation/imx500-converterr   z/IMX export is not supported for end2end models.)z model-compression-toolkit>=2.3.0zsony-custom-layers>=0.3.0zedge-mdt-tpc>=1.1.0zimx500-converter[pt]>=3.16.1r   N) get_target_platform_capabilitiesmulticlass_nmsr  z0 starting export with model_compression_toolkit rx  javaz	--versionT)r  rs  z(?:openjdk|java) (\d+)ry      zJava version too oldsudo)aptinstallz-yzopenjdk-21-jrer  c                 s   s(    | D ]}|d }|d }|gV  qd S )Nr  r  rv   )
dataloaderr<   r  rv   rv   rw   representative_dataset_gen  s   
z7Exporter.export_imx.<locals>.representative_dataset_genz4.0imx500)tpc_versiondevice_typeC2PSA)submul_2add_14cat_21g~8CA   )r  muladd_6cat_17gffffuCA   z9IMX export only supported for YOLOv8n and YOLO11n models.r  r  )num_of_images)concat_threshold_update)mixed_precision_configquantization_configbit_width_config)weights_memoryi  )n_epochsuse_hessian_based_weightsuse_hessian_sample_attention)r   representative_data_gentarget_resource_utilizationgptq_configcore_configtarget_platform_capabilities)	in_moduler  r  r  r  c                	       sF   e Zd Z			ddejjdededef fdd	Zfd
dZ	  Z
S )z'Exporter.export_imx.<locals>.NMSWrapperMbP?ffffff?rt  r   score_thresholdr  max_detectionsc                    s&   t    || _|| _|| _|| _dS )a  
                Wrapping PyTorch Module with multiclass_nms layer from sony_custom_layers.

                Args:
                    model (nn.Module): Model instance.
                    score_threshold (float): Score threshold for non-maximum suppression.
                    iou_threshold (float): Intersection over union threshold for non-maximum suppression.
                    max_detections (float): The number of detections to return.
                N)superr   r   r  r  r  )r   r   r  r  r  	__class__rv   rw   r     s
   

z0Exporter.export_imx.<locals>.NMSWrapper.__init__c                    s6   |  |}|d }|d } ||| j| j| jd}|S )Nr   ry   )boxesscoresr  r  r  r   r  r  r  )r   r  r  r  r  r?   r  rv   rw   r;    s   
z/Exporter.export_imx.<locals>.NMSWrapper.forward)r  r  rt  )__name__
__module____qualname__r   rW  Moduler3  rP  r   r;  __classcell__rv   r  r  rw   
NMSWrapper  s    r  r  r  rg   r  z	_imx.onnx)r   save_model_pathrepr_datasetz
imxconv-ptz-iz-oz--no-input-persistencyz--overwrite-outputz
labels.txtwutf-8encodingc                 S   s   g | ]	\}}| d qS )r  rv   )r   rk  rk   rv   rv   rw   r     r  z'Exporter.export_imx.<locals>.<listcomp>)Kr   r}   r   r  r(   model_compression_toolkitrA   edgemdt_tpcr  sony_custom_layers.pytorchr  r   r   r   r  r  rp  rw  researchrP  groupFileNotFoundErrorCalledProcessErrorAssertionErrorr*   rw  coreBitWidthConfig__str__rS  r   r6  set_manual_activation_bit_widthcommonnetwork_editorsNodeNameFilter
CoreConfig MixedPrecisionQuantizationConfigQuantizationConfigResourceUtilizationgptq+pytorch_gradient_post_training_quantizationget_pytorch_gptq_configptq"pytorch_post_training_quantizationr   rW  r  r   r)  r  r8  r   r{   r   r  rI  rL  r/  r
  rk   exporterpytorch_export_modelr  rR  r  r  r  r  r  r  r  
writelinesr   )r   r   r  mctrA   r  java_outputversion_matchjava_versionr  r  tpcbit_cfglayer_namesr  n_layers
layer_namer&  resource_utilizationquant_modelr  r   
onnx_modelr  r   r   r  rI  rv   r  rw   r_  g  s   

	& 


zExporter.export_imxFc                 C   s  |s-ddl }||d|j}|dtj| jdd W d   dS 1 s&w   Y  dS tr7t	d|  t
d ddl}zdd	lm} dd
lm} W n tyb   dd
lm} dd	lm} Y nw | }| jd |_| jd |_| jd |_| jd |_t|jd }	t|	ddd}
|
t| j W d   n1 sw   Y  | }|	j|_|jj|_| }d|_d|_ |! |_"|# |j"_$|j%j&|j"j$_'|j(j)|j"_*| }d|_d|_ |g|_+| j,j-dkr| }d|_d|_ |g|_+|. }|g|_/| j,j-dkr	||gn|g|_0|g|_1|2d}|3|4||j5j6 |7 }|j58t|}|9| |:t|	g |;  |	<  dS )zVAdd metadata to *.tflite models per https://ai.google.dev/edge/litert/models/metadata.r   Nazmetadata.jsonr   )indentzPTFLite Support package may not be compatible with Python>=3.12 environments for )tflite_supportz8flatbuffers>=23.5.26,<100; platform_machine == 'aarch64')metadata_schema_py_generated)rR  r   r   r   r   ztemp_meta.txtr  r  r  r  zInput image to be detected.outputzCCoordinates of detected objects, class labels, and confidence scorer  zMask protos)=zipfileZipFileZIP_DEFLATEDwritestrr  r  rR  r%   r   r   r(   flatbuffers tensorflow_lite_support.metadatar  'tensorflow_lite_support.metadata.pythonr@  r  ModelMetadataTrk   r   r   r   r   ra  r  writer  AssociatedFileTAssociatedFileTypeTENSOR_AXIS_LABELSr(  TensorMetadataTr   ContentTcontentImagePropertiesTcontentPropertiesColorSpaceTypeRGB
colorSpaceContentPropertiesImagePropertiescontentPropertiesTypeassociatedFilesr   r   SubGraphMetadataTinputTensorMetadataoutputTensorMetadatasubgraphMetadataBuilderFinishPackMetadataPopulatorMETADATA_FILE_IDENTIFIEROutputwith_model_fileload_metadata_bufferload_associated_filespopulater  )r   rI  use_flatbuffersr  zfr  schemarR  
model_metarb  r   
label_file
input_metar  output2subgraphbmetadata_buf	populatorrv   rv   rw   r^     sv   




zExporter._add_tflite_metadatazCoreML Pipeline:c                 C   s   ddl }t| d|j d t| jj\}}}}| }t|j	j
\}	}
trJddlm} |d||f}|d|i}||	j j}||
j j}n| jd | jd	 d
 f}| jd d
f}| jd }|j	jd jjj|j	jd jjj}}|\}}t||ksJ t| d| ||	jjjdd< ||
jjjdd< |jj||d}|jj }|j|_t dD ])}|j!j	j
| " }|j	j#  |j	j| $| |j	j
#  |j	j
| $| qd|j	j
d _d|j	j
d	 _|d
g}t dD ]<}|j	j
| jj}|j%j&#  d|j%j&d _'d|j%j&d _(|j%j&#  || |j%j&d	 _'|| |j%j&d	 _(|jdd= q|j)}|	j|_*|
j|_+d|_,d|_-d|_.d|_/| j0j1|_2| j0j3|_4d|j5_6|j7j89|:  |j|}|jj;j<d|jj=>d||fd|jj=? fd|jj=? fgddgd}|@| |@| |jAj	jd $|j!j	jd "  |jAj	j
d $|j!j	j
d "  |jAj	j
d	 $|j!j	j
d	 "  |j|jA_|jAj	jjBCtD|j2tD|j4d |jj|jA|d}d|jEd< d|j2 d|jEd< d|j4 d|jEd< d|jFd< d|jFd< t| d |S )zYOLO CoreML pipeline.r   Nz$ starting pipeline with coremltools rx  )Imager  r  r   ry   r  r   z names found for nc=r'  
confidencecoordinatesr   iouThresholdconfidenceThresholdTr   )input_featuresoutput_features)zIoU thresholdzConfidence thresholdzInput imagez,(optional) IoU threshold override (default: )z3(optional) Confidence threshold override (default: u?   Boxes × Class confidence (see user-defined metadata "classes")u7   Boxes × [x, y, width, height] (relative to image size)z pipeline success)Gr)  r   r   r   r   rH  r   get_speciterr   r  r   PILr+  newpredictrk   rJ  rR  r   r(  	imageTyper  r  rS  multiArrayTyper.  MLModelproto	Model_pb2ModelspecificationVersionr@  _specSerializeToStringr  r  
shapeRange
sizeRanges
lowerBound
upperBoundnonMaximumSuppressionconfidenceInputFeatureNamecoordinatesInputFeatureNameconfidenceOutputFeatureNamecoordinatesOutputFeatureNameiouThresholdInputFeatureName#confidenceThresholdInputFeatureNamer   r  r.  r)  r/  pickTopperClassstringClassLabelsvectorr   r  pipelinePipeline	datatypesArrayDouble	add_modelspecuserDefinedr9  r  input_descriptionoutput_description)r   r   r(  r   r:  rk  hr  rV  out0out1r+  r  out
out0_shape
out1_shaper   nxnyncnms_specidecoder_outputoutput_sizesma_typer?   	nms_modelrP  rv   rv   rw   r6  L  s   
&"



"""



zExporter._pipeline_coremleventc                 C   s   | j | | dS )zAppends the given callback.N)r!   r   r   ri  callbackrv   rv   rw   add_callback  r   zExporter.add_callbackc                 C   s    | j |g D ]}||  qdS )z(Execute all callbacks for a given event.N)r!   r.  rj  rv   rv   rw   r    s   
zExporter.run_callbacksr   )r   )F)r  r  r  __doc__r   r   r  ro  rw  r   r"   rT  r/   rU  r\  r]  r^  rV  r.   rW  rX  rY  rZ  r[  r`  r_  r^  r6  rl  r  rv   rv   rv   rw   r      sT      
@JCK#X$  
Ljr   c                       (   e Zd ZdZ fddZdd Z  ZS )r,  z;Wrap an Ultralytics YOLO model for Apple iOS CoreML export.c                    sh   t    |j\}}}}|| _t|j| _||kr d| | _dS t	d| d| d| d| g| _dS )zHInitialize the IOSDetectModel class with a YOLO model and example image.g      ?N)
r  r   r   r   rS  r   rb  	normalizer   tensor)r   r   rH  rk  rZ  r  r  rv   rw   r     s   
(zIOSDetectModel.__init__c                 C   s6   |  |d ddd| jfd\}}||| j fS )zRNormalize predictions of object detection model with input size-dependent factors.r   ry   r  )r   r   r#  rb  ro  )r   ru   xywhclsrv   rv   rw   r;    s   (zIOSDetectModel.forwardr  r  r  rm  r   r;  r  rv   rv   r  rw   r,    s    r,  c                       rn  )rA  zBModel wrapper with embedded NMS for Detect, Segment, Pose and OBB.c                    s<   t    || _|| _|jdk| _| jjth dv | _dS )z
        Initialize the NMSModel.

        Args:
            model (torch.nn.module): The model to wrap with NMS postprocessing.
            args (Namespace): The export arguments.
        r  >   rZ   rV   rQ   N)	r  r   r   r   r   r  r~   	frozensetis_tf)r   r   r   r  rv   rw   r     s
   
zNMSModel.__init__c                 C   s  ddl m} ddlm} | |}t|tr|d n|}t|j|j	d}|j
d }|dd}|j
d dt| jj  }| jjrl| jjdkrltjtt| jj| tdg|j
dd	 R i |}	t||	f}|jdt| jj|gd
d\}
}}|jdd\}}t|j
d | jj| j_tj|| jj|
j
d d
 | fi |}t|D ]}|
| || || || f\}}}}|| jjk}| jr||9 }|t| jjd |j
d j}|| || || || f\}}}}| }| jrdnd}| jj dkr||9 }n|| tj|j
d
d	 fi |  }| jj!sQ| jr#d
nd}|"dd#|j
d |}|d	d	d	|f ||  }tj||d	d	|d	f fdd}| jrs|t$| jpo| jj%paddk po| jj dkoo| jj& dn|}|| jrtj||gddn||| jj'd	| jj }tj|| || (dd|| (dd)|j	|| gdd}ddd| jj|j
d  f}	tj*j+,||	||< q| jj-dkr|d	| |d fS |d	| S )af  
        Performs inference with NMS post-processing. Supports Detect, Segment, OBB and Pose.

        Args:
            x (torch.Tensor): The preprocessed tensor with shape (N, 3, H, W).

        Returns:
            (torch.Tensor): List of detections, each an (N, max_det, 4 + 2 + extra_shape) Tensor where N is the number of detections after NMS.
        r   )partial)r?   )r{   r   r   r  ry   Nr   r   r  r  rV   r  rG   )use_triur  ).	functoolsrv  torchvision.opsr?   r   r   r   rr   r{   r   r   r   rS  r   r   rC   r<   r   r,  rQ  rp  r=  r#  minr8  r@  r)  ru  topkindicescloner  r~   agnostic_nmsreshapeexpandr3   rD   rI   r  viewr   rW  rX  padr   )r   ru   rv  r?   predspredr   bsextra_shaper  r  r  extrasclassesr]  rd  boxrr  scoreextramasknmsbox
multiplierend
cls_offsetoffboxnms_fnkeepdetsrv   rv   rw   r;    sp   


>"($ $
(
"
	
4.zNMSModel.forwardrs  rv   rv   r  rw   rA    s    rA  )\rm  r  r   r  r  r  r  rB  
contextlibr   copyr   r   pathlibr   r  r  r   ultralyticsr   ultralytics.cfgr   r   ultralytics.datar	   ultralytics.data.datasetr
   ultralytics.data.utilsr   r   ultralytics.nn.autobackendr   r   ultralytics.nn.modulesr   r   r   r   ultralytics.nn.tasksr   r   r   r   ultralytics.utilsr   r   r   r   r   r   r   r   r   r    r!   r"   r#   r$   ultralytics.utils.checksr%   r&   r'   r(   r)   r*   ultralytics.utils.downloadsr+   r,   r-   ultralytics.utils.exportr.   r/   ultralytics.utils.filesr0   r1   ultralytics.utils.opsr2   r3   r5  r4   r5   r6   rx   r   r   r   r   r   rW  r  r,  rA  rv   rv   rv   rw   <module>   s^   8@ %	
         k