HALCON Learning — Deep Learning (4): classify_infer (inference on new images)

* 
* This example is part of a series of examples, which summarizes
* the workflow for DL classification. It uses the MVTec pill dataset.
* The four parts are:
* 1. Dataset preprocessing.
* 2. Training of the model.
* 3. Evaluation of the trained model.
* 4. Inference on new images.
* 
* This example covers part 4: 'Inference on new images'.
* 
* It explains how to apply a trained model on new images and shows
* an application based on the MVTec pill dataset.
* 
* Please note: This script uses a pretrained model. To use the output
* of part 1 and part 2 of this example series, set UsePretrainedModel
* to false below.
* 
* Disable automatic window/variable updates for faster, silent execution.
dev_update_off ()
* 
* In this example, the inference steps are explained in graphics windows,
* before they are executed. Set the following parameter to false in order to
* skip this visualization.
ShowExampleScreens := false
* 
* Select which trained model to use:
*   true  -> the pretrained model shipped with HALCON by MVTec,
*   false -> the model trained in part 2 of this example series.
* Here it is set to false, so the locally retrained model is used.
UsePretrainedModel := false
* 
* Inference can be done on any deep learning device available.
* See the respective system requirements in the Installation Guide.
* If possible a GPU is used in this example.
* In case you explicitly wish to run this example on the CPU,
* choose the CPU device instead.
* Query GPU devices before CPU devices so that a GPU is preferred when present.
query_available_dl_devices (['runtime','runtime'], ['gpu','cpu'], DLDeviceHandles)
if (|DLDeviceHandles| == 0)
    throw ('No supported device found to continue this example.')
endif
* Due to the filter used in query_available_dl_devices, the first device is a GPU, if available.
DLDevice := DLDeviceHandles[0]
* 
* Optional tutorial screens: each dev_display_screen_* call shows one
* explanatory window and stop () waits for the user to press Run (F5).
if (ShowExampleScreens)
    * 
    * Initial example windows and parameters etc.
    dev_example_init (ShowExampleScreens, UsePretrainedModel, ExampleInternals)
    * 
    * Introduction text of example series.
    dev_display_screen_introduction (ExampleInternals)
    stop ()
    * 
    * Show example image.
    dev_display_screen_example_images (ExampleInternals)
    stop ()
    * 
    * Explain inference steps.
    dev_display_screen_inference_step_1 (ExampleInternals)
    stop ()
    dev_display_screen_inference_step_2 (ExampleInternals)
    stop ()
    dev_display_screen_inference_step_3 (ExampleInternals)
    stop ()
    * 
    * Mention on which device the deep learning operators will run.
    dev_display_screen_device (ExampleInternals, DLDevice)
    stop ()
    * 
    * Run the program showing inference by means of an application.
    dev_display_screen_run_program (ExampleInternals)
    stop ()
    * 
    * Terminate example screens.
    dev_close_example_windows (ExampleInternals)
endif

* *************************************************
* **   Set paths and parameters for inference   ***
* *************************************************
* 
* We will demonstrate the inference on the example images.
* In a real application newly incoming images (not used for training or evaluation)
* would be used here.
* 
* In this example, we read the images from file.
* Directory name with the images of the pill dataset.
get_system ('example_dir', ExampleDir)
ImageDir := ExampleDir + '/images/pill'
* 
* Set the paths of the retrained model and the corresponding preprocessing parameters.
* Example data folder containing the outputs of the previous example series.
ExampleDataDir := 'classify_pill_defects_data'
if (UsePretrainedModel)
    * Use the pretrained model and preprocessing parameters shipping with HALCON.
    PreprocessParamFileName := 'classify_pill_defects_preprocess_param.hdict'
    RetrainedModelFileName := 'classify_pill_defects.hdl'
else
    * File name of the dict containing parameters used for preprocessing.
    * Note: Adapt DataDirectory after preprocessing with another image size.
    DataDirectory := ExampleDataDir + '/dldataset_pill_300x300'
    PreprocessParamFileName := DataDirectory + '/dl_preprocess_param.hdict'
    * File name of the finetuned classification model (output of part 2).
    RetrainedModelFileName := ExampleDataDir + '/best_dl_model_classification.hdl'
endif
* 
* Batch size used during inference. With 1, images are processed one at a time.
BatchSizeInference := 1
* 
* ********************
* **   Inference   ***
* ********************
* 
* Check if all necessary files exist.
check_data_availability (ExampleDataDir, PreprocessParamFileName, RetrainedModelFileName, UsePretrainedModel)
* 
* Read in the retrained model.
read_dl_model (RetrainedModelFileName, DLModelHandle)
* 
* Set the batch size.
set_dl_model_param (DLModelHandle, 'batch_size', BatchSizeInference)
* 
* Initialize the model for inference on the selected device (GPU if available).
set_dl_model_param (DLModelHandle, 'device', DLDevice)
* 
* Get the class names and IDs from the model (needed for result visualization).
get_dl_model_param (DLModelHandle, 'class_names', ClassNames)
get_dl_model_param (DLModelHandle, 'class_ids', ClassIDs)
* 
* Get the parameters used for preprocessing, so new images are
* preprocessed exactly like the training data.
read_dict (PreprocessParamFileName, [], [], DLPreprocessParam)
* 
* Create window dictionary for displaying results.
create_dict (WindowHandleDict)
* Create dictionary with dataset parameters necessary for displaying.
create_dict (DLDataInfo)
set_dict_tuple (DLDataInfo, 'class_names', ClassNames)
set_dict_tuple (DLDataInfo, 'class_ids', ClassIDs)
* Set generic parameters for visualization.
create_dict (GenParam)
set_dict_tuple (GenParam, 'scale_windows', 1.1)
* 
* List the files the model should be applied to (e.g., using list_image_files).
* For this example, we select some images randomly.
get_example_inference_images (ImageDir, ImageFiles)
* 
* Loop over all images in batches of size BatchSizeInference for inference.
for BatchIndex := 0 to floor(|ImageFiles| / real(BatchSizeInference)) - 1 by 1
    * 
    * Get the paths to the images of the batch.
    Batch := ImageFiles[BatchIndex * BatchSizeInference:(BatchIndex + 1) * BatchSizeInference - 1]
    * Read the images of the batch.
    * Read the N images of this batch from file.
    read_image (ImageBatch, Batch)
    * 
    * Generate the DLSampleBatch.
    * Pack the N images into DL sample dictionaries (the model's input format).
    gen_dl_samples_from_images (ImageBatch, DLSampleBatch)
    * 
    * Preprocess the DLSampleBatch.
    * Preprocess the samples the same way the training dataset was preprocessed.
    preprocess_dl_samples (DLSampleBatch, DLPreprocessParam)
    * 
    * Apply the DL model on the DLSampleBatch.
    * Run inference with the loaded model on the sample batch.
    apply_dl_model (DLModelHandle, DLSampleBatch, [], DLResultBatch)
    * 
    * Postprocessing and visualization.
    * Loop over each sample in the batch.
    for SampleIndex := 0 to BatchSizeInference - 1 by 1
        * 
        * Get sample and according results.
        DLSample := DLSampleBatch[SampleIndex]
        DLResult := DLResultBatch[SampleIndex]
        * 
        * Display results and text.
        dev_display_dl_data (DLSample, DLResult, DLDataInfo, 'classification_result', GenParam, WindowHandleDict)
        get_dict_tuple (WindowHandleDict, 'classification_result', WindowHandles)
        dev_set_window (WindowHandles[0])
        set_display_font (WindowHandles[0], 16, 'mono', 'true', 'false')
        dev_disp_text ('Press Run (F5) to continue', 'window', 'bottom', 'right', 'black', [], [])
        stop ()
    endfor
endfor
* 
* Close windows used for visualization.
dev_close_window_dict (WindowHandleDict)
* 
* 
if (ShowExampleScreens)
    * Final explanations.
    dev_display_screen_final (ExampleInternals)
    stop ()
    * Close example windows.
    dev_close_example_windows (ExampleInternals)
endif

 

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

枫呱呱

如果这篇博文对你有用,求打赏

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值