报错详情
报错代码
# Failing version: paddle-related modules are imported at MODULE level, i.e.
# in the parent process, before the worker process is spawned. This is what
# triggers the reported error (paddle's runtime state does not survive being
# shared with / inherited by a multiprocessing child).
from paddle.inference import PrecisionType
from PaddleDetection_Inference_Lib import Paddle_inference
# process 1
def paddle_predict(quene):
    # NOTE(review): `Stereo_Camera`, `model_folder_dir`, `use_model_img_size`,
    # `infer_img_size`, `use_gpu`, `gpu_memory`, `use_tensorrt`,
    # `precision_mode` and `time` are not defined in this excerpt — they are
    # presumably defined elsewhere in the original script; confirm there.
    # NOTE(review): parameter name `quene` is likely a typo for `queue`, kept
    # as-is since it is part of the documented original code.
    cap = Stereo_Camera(camera_id=0)
    paddle_infer = Paddle_inference(model_folder_dir, use_model_img_size,
                                    infer_img_size, use_gpu, gpu_memory,
                                    use_tensorrt, precision_mode)
    while True:
        start = time.time()
        image = cap.rgb_image(0)
        result = paddle_infer.infer(image)
        # (snippet appears truncated here: `start`/`result` are not used in
        # the visible lines)
解决方法
将所有与 paddle 相关的模块都放到 multiprocessing 子进程函数内部去 import,并且不要在多进程之外 import 这些模块,程序就可以正常运行了;这样在进程结束后,相应的资源也会自动释放。
# process 1
def paddle_predict(quene):
    # Fix: import ALL paddle-related modules inside the worker-process
    # function, so the child process performs the imports itself; when the
    # process exits, the associated resources are released automatically.
    from paddle.inference import PrecisionType
    from PaddleDetection_Inference_Lib import Paddle_inference
    # NOTE(review): `Stereo_Camera` and the config globals
    # (`model_folder_dir`, `use_model_img_size`, `infer_img_size`, `use_gpu`,
    # `gpu_memory`, `use_tensorrt`, `precision_mode`) plus `time` are assumed
    # to be defined elsewhere in the original script — confirm there.
    cap = Stereo_Camera(camera_id=0)
    paddle_infer = Paddle_inference(model_folder_dir, use_model_img_size,
                                    infer_img_size, use_gpu, gpu_memory,
                                    use_tensorrt, precision_mode)
    while True:
        start = time.time()
        image = cap.rgb_image(0)
        result = paddle_infer.infer(image)
        # (snippet appears truncated here: `start`/`result` are not used in
        # the visible lines)
参考文章: