Converting YUV to UIImage

Note

The Agora video stream invokes this callback 15 times per second, which generates a lot of temporary objects. To keep memory from ballooning, wrap the decoding code in an @autoreleasepool, as shown in the callback below.

Receiving the video data

The onCaptureVideoFrame method is the callback through which the SDK delivers raw captured video frames:

    // the SDK delivers each captured video frame here
    virtual bool onCaptureVideoFrame(VideoFrame& videoFrame) override
    {
        std::lock_guard<std::mutex> autolock(m_mtx);
        m_mapVideoTick[0] = get_timestamp();
        if (!mediaDataPlugin) return true;
        @autoreleasepool {
            // screenshot: wrap the raw frame and convert it to a UIImage
            AgoraVideoRawData *data = getVideoRawDataWithVideoFrame(videoFrame);
            [mediaDataPlugin yuvToUIImageWithVideoRawData:data];
        }
        return true;
    }
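
The post never shows how this observer gets attached to the engine. Below is a minimal registration sketch, assuming the standard Agora C++ raw-data interface (agora::media::IMediaEngine); the observer class name MediaDataObserver and the function name registerRawDataObserver are made up for illustration, header paths vary between SDK versions, and the file must be compiled as Objective-C++:

    // MediaDataObserver is a hypothetical agora::media::IVideoFrameObserver
    // subclass implementing onCaptureVideoFrame as shown above.
    #include <AgoraRtcKit/IAgoraRtcEngine.h>
    #include <AgoraRtcKit/IAgoraMediaEngine.h>

    static MediaDataObserver s_observer;

    void registerRawDataObserver(AgoraRtcEngineKit *agoraKit)
    {
        agora::rtc::IRtcEngine *rtcEngine =
            (agora::rtc::IRtcEngine *)[agoraKit getNativeHandle];
        agora::util::AutoPtr<agora::media::IMediaEngine> mediaEngine;
        mediaEngine.queryInterface(rtcEngine, agora::AGORA_IID_MEDIA_ENGINE);
        if (mediaEngine) {
            // from here on the SDK calls onCaptureVideoFrame for every captured frame
            mediaEngine->registerVideoFrameObserver(&s_observer);
        }
    }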

Here AgoraVideoRawData is an Objective-C object that carries the YUV data:

@interface AgoraVideoRawData : NSObject

@property (nonatomic, assign) int type;
@property (nonatomic, assign) int width;  //width of video frame
@property (nonatomic, assign) int height;  //height of video frame
@property (nonatomic, assign) int yStride;  //stride of Y data buffer
@property (nonatomic, assign) int uStride;  //stride of U data buffer
@property (nonatomic, assign) int vStride;  //stride of V data buffer
@property (nonatomic, assign) int rotation; // rotation of this frame (0, 90, 180, 270)
@property (nonatomic, assign) int64_t renderTimeMs; // timestamp
@property (nonatomic, assign) char* yBuffer;  //Y data buffer
@property (nonatomic, assign) char* uBuffer;  //U data buffer
@property (nonatomic, assign) char* vBuffer;  //V data buffer
@property (nonatomic, assign) char* uvBuffer;  //UV data buffer

@end
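
To make the buffer layout concrete, here is a worked size calculation for a hypothetical 640×360 I420 frame (the numbers are illustrative, not from the original post):

    // yStride = 640, uStride = vStride = 320 (chroma is subsampled 2x2)
    // Y plane:         yStride * height         = 640 * 360 = 230,400 bytes
    // U plane:         uStride * (height/2)     = 320 * 180 =  57,600 bytes
    // V plane:         vStride * (height/2)     = 320 * 180 =  57,600 bytes
    // interleaved UV:  (uStride*2) * (height/2) = height * uStride
    //                                           = 360 * 320 = 115,200 bytes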

The getVideoRawDataWithVideoFrame function copies the frame data into newly malloc'd buffers and wraps it in an AgoraVideoRawData object:

    AgoraVideoRawData* getVideoRawDataWithVideoFrame(VideoFrame& videoFrame)
    {
        AgoraVideoRawData *data = [[AgoraVideoRawData alloc] init];
        data.type = videoFrame.type;
        data.width = videoFrame.width;
        data.height = videoFrame.height;
        data.yStride = videoFrame.yStride;
        data.uStride = videoFrame.uStride;
        data.vStride = videoFrame.vStride;
        data.rotation = videoFrame.rotation;
        data.renderTimeMs = videoFrame.renderTimeMs;

        // copy the Y plane as-is
        data.yBuffer = (char*)malloc(data.yStride * data.height);
        memcpy(data.yBuffer, videoFrame.yBuffer, data.yStride * data.height);

        // interleave the planar U and V buffers into a single UV buffer (NV12 order)
        char* uBuffer = (char*)videoFrame.uBuffer;
        char* vBuffer = (char*)videoFrame.vBuffer;
        size_t uvBufferLength = data.height * data.uStride;
        char* uvBuffer = (char*)malloc(uvBufferLength);
        for (size_t uv = 0, u = 0; uv < uvBufferLength; uv += 2, u++) {
            uvBuffer[uv] = uBuffer[u];      // Cb
            uvBuffer[uv + 1] = vBuffer[u];  // Cr
        }
        
        data.uvBuffer = uvBuffer;
        
        return data;
    }

Once the AgoraVideoRawData is built, it is handed to yuvToUIImageWithVideoRawData:, which converts it to a UIImage and then frees the buffers malloc'd above:

- (void)yuvToUIImageWithVideoRawData:(AgoraVideoRawData *)data {
    size_t width = data.width;
    size_t height = data.height;
    size_t yStride = data.yStride;
    size_t uvStride = data.uStride;
    
    @autoreleasepool {
        void * planeBaseAddress[2] = {data.yBuffer, data.uvBuffer};
        size_t planeWidth[2] = {width, width / 2};
        size_t planeHeight[2] = {height, height / 2};
        size_t planeBytesPerRow[2] = {yStride, uvStride * 2};
        
        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn result = CVPixelBufferCreateWithPlanarBytes(kCFAllocatorDefault,
                                                             width, height,
                                                             kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                                             NULL, 0,
                                                             2, planeBaseAddress, planeWidth, planeHeight, planeBytesPerRow,
                                                             NULL, NULL, NULL,
                                                             &pixelBuffer);
        if (result == kCVReturnSuccess) {
            UIImage *image = [self CVPixelBufferToImage:pixelBuffer rotation:data.rotation];
            if (self.imageBlock) {
                self.imageBlock(image);
            }
        } else {
            // report the failure to the Bonree (博睿) APM SDK, if it is linked in
            id brsAgent = NSClassFromString(@"BRSAgent");
            if ([brsAgent respondsToSelector:@selector(setExtraInfo:)]) {
                NSDictionary *resultDict = @{@"CVPixelBufferRef": @"Failed"};
                [brsAgent performSelector:@selector(setExtraInfo:) withObject:resultDict];
            }
        }
        
        CVPixelBufferRelease(pixelBuffer);
        
        if(data.yBuffer != NULL) {
            free(data.yBuffer);
            data.yBuffer = NULL;
        }
        
        if(data.uvBuffer != NULL) {
            free(data.uvBuffer);
            data.uvBuffer = NULL;
        }
    }
}
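
One caveat with CVPixelBufferCreateWithPlanarBytes: it wraps the malloc'd planes without copying them, which is why the free() calls above must not run until rendering has finished. A sketch of an alternative (my suggestion, not part of the original post) that lets Core Video own the memory:

// Sketch: create the pixel buffer first, then copy the planes into it,
// so Core Video manages the memory and no manual free() is needed.
static CVPixelBufferRef createPixelBufferByCopy(AgoraVideoRawData *data) {
    CVPixelBufferRef pb = NULL;
    CVReturn r = CVPixelBufferCreate(kCFAllocatorDefault, data.width, data.height,
                                     kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                     NULL, &pb);
    if (r != kCVReturnSuccess) return NULL;
    CVPixelBufferLockBaseAddress(pb, 0);
    // Y plane: the destination stride may differ from yStride, so copy row by row
    uint8_t *dstY = CVPixelBufferGetBaseAddressOfPlane(pb, 0);
    size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pb, 0);
    for (int row = 0; row < data.height; row++) {
        memcpy(dstY + row * dstYStride, data.yBuffer + row * data.yStride, data.width);
    }
    // UV plane: height/2 rows, each data.width bytes of interleaved Cb/Cr
    uint8_t *dstUV = CVPixelBufferGetBaseAddressOfPlane(pb, 1);
    size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pb, 1);
    for (int row = 0; row < data.height / 2; row++) {
        memcpy(dstUV + row * dstUVStride, data.uvBuffer + row * data.uStride * 2, data.width);
    }
    CVPixelBufferUnlockBaseAddress(pb, 0);
    return pb;  // caller releases with CVPixelBufferRelease
}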
// CVPixelBuffer --> CIImage --> UIImage conversion
- (UIImage *)CVPixelBufferToImage:(CVPixelBufferRef)pixelBuffer rotation:(int)rotation {
    size_t width, height;
    CGImagePropertyOrientation orientation;
    switch (rotation) {
        case 0:
            width = CVPixelBufferGetWidth(pixelBuffer);
            height = CVPixelBufferGetHeight(pixelBuffer);
            orientation = kCGImagePropertyOrientationUp;
            break;
        case 90:
            width = CVPixelBufferGetHeight(pixelBuffer);
            height = CVPixelBufferGetWidth(pixelBuffer);
            orientation = kCGImagePropertyOrientationRight;
            break;
        case 180:
            width = CVPixelBufferGetWidth(pixelBuffer);
            height = CVPixelBufferGetHeight(pixelBuffer);
            orientation = kCGImagePropertyOrientationDown;
            break;
        case 270:
            width = CVPixelBufferGetHeight(pixelBuffer);
            height = CVPixelBufferGetWidth(pixelBuffer);
            orientation = kCGImagePropertyOrientationLeft;
            break;
        default:
            return nil;
    }
    CIImage *coreImage = [[CIImage imageWithCVPixelBuffer:pixelBuffer] imageByApplyingOrientation:orientation];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:coreImage
                                                   fromRect:CGRectMake(0, 0, width, height)];
    UIImage *finalImage = [[UIImage alloc] initWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return finalImage;
}
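
A performance note not in the original post: contextWithOptions: builds a fresh CIContext on every frame, which is comparatively expensive at 15 fps. A minimal sketch of reusing a single context instead:

// Sketch: cache one CIContext and reuse it across frames.
static CIContext *sharedCIContext(void) {
    static CIContext *ctx = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        ctx = [CIContext contextWithOptions:nil];
    });
    return ctx;
}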

The resulting finalImage is the frame converted to a UIImage.
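
For completeness, a hypothetical consumer of the result; imageBlock fires on the SDK's video thread, so UIKit work must hop to the main queue (previewImageView is a made-up property for illustration):

mediaDataPlugin.imageBlock = ^(UIImage *image) {
    dispatch_async(dispatch_get_main_queue(), ^{
        self.previewImageView.image = image;  // UIKit only on the main thread
    });
};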
