Closed spchziee closed 6 years ago
测试后发现当我把800宽的视频流当成每行832个像素来处理时,Y色块的值正常了.但UV色块始终会有偏差,实在是找不出为什么,请大神们指教
// Forward each decoded video frame to the recording SDK while recording is on.
// NOTE(review): relies on a `weakSelf` captured from the enclosing scope
// (declared outside this snippet) to avoid a retain cycle through the block.
self.ksyPlayer.videoDataBlock = ^(CMSampleBufferRef sampleBuffer) {
    if (!weakSelf.bRecording) {
        return;
    }
    // Test only: round-trip the frame through a deep copy before recording.
    // (To record the data unchanged, pass sampleBuffer straight through.)
    CMSampleBufferRef copiedBuffer = [weakSelf createVideoSample:sampleBuffer];
    [weakSelf.recordSdk addVideoData:copiedBuffer];
    CFRelease(copiedBuffer);
};
/// Deep-copies the image buffer carried by `sampleBuffer` into a freshly
/// allocated CVPixelBuffer and wraps it in a new CMSampleBuffer that reuses
/// the original frame's timing info. Returns NULL on failure.
///
/// BUG FIX: the previous version copied each plane with a single
/// `memcpy(dst, src, width*height)`, treating both buffers as tightly packed.
/// CVPixelBuffer rows are padded (bytes-per-row/stride != width; the old log
/// showed newPerRow=1204 vs oriPerRow=1276 for an 800-wide frame), so the Y
/// and UV planes were sheared — exactly the color offset reported in this
/// thread. The copy must go plane-by-plane, row-by-row, honoring each
/// buffer's own stride.
///
/// Ownership: the caller owns the returned CMSampleBufferRef (CFRelease it).
- (CMSampleBufferRef)createVideoSample:(CMSampleBufferRef)sampleBuffer {
    CVPixelBufferRef srcBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (srcBuffer == NULL) {
        return NULL;
    }

    size_t width = CVPixelBufferGetWidth(srcBuffer);
    size_t height = CVPixelBufferGetHeight(srcBuffer);
    OSType formatType = CVPixelBufferGetPixelFormatType(srcBuffer);

    // Same attributes as before: GLES-compatible, IOSurface-backed.
    NSDictionary *attributes = @{
        (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @YES,
        (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
    };

    CVPixelBufferRef dstBuffer = NULL;
    CVReturn cvResult = CVPixelBufferCreate(kCFAllocatorDefault,
                                            width,
                                            height,
                                            formatType,
                                            (__bridge CFDictionaryRef)attributes,
                                            &dstBuffer);
    if (cvResult != kCVReturnSuccess || dstBuffer == NULL) {
        return NULL;
    }

    // Read-only lock on the source; we never write through its base address.
    CVPixelBufferLockBaseAddress(srcBuffer, kCVPixelBufferLock_ReadOnly);
    CVPixelBufferLockBaseAddress(dstBuffer, 0);

    size_t planeCount = CVPixelBufferGetPlaneCount(srcBuffer);
    if (planeCount == 0) {
        // Non-planar format (e.g. BGRA): single plane via the base address,
        // still copied row-by-row because strides may differ.
        const UInt8 *src = CVPixelBufferGetBaseAddress(srcBuffer);
        UInt8 *dst = CVPixelBufferGetBaseAddress(dstBuffer);
        size_t srcStride = CVPixelBufferGetBytesPerRow(srcBuffer);
        size_t dstStride = CVPixelBufferGetBytesPerRow(dstBuffer);
        size_t rowBytes = MIN(srcStride, dstStride);
        for (size_t row = 0; row < height; row++) {
            memcpy(dst + row * dstStride, src + row * srcStride, rowBytes);
        }
    } else {
        // Planar format (NV12 here: plane 0 = Y, plane 1 = interleaved UV at
        // half height). Each plane has its own base address, stride, and
        // height; copy the overlapping bytes of every row.
        for (size_t plane = 0; plane < planeCount; plane++) {
            const UInt8 *src = CVPixelBufferGetBaseAddressOfPlane(srcBuffer, plane);
            UInt8 *dst = CVPixelBufferGetBaseAddressOfPlane(dstBuffer, plane);
            size_t srcStride = CVPixelBufferGetBytesPerRowOfPlane(srcBuffer, plane);
            size_t dstStride = CVPixelBufferGetBytesPerRowOfPlane(dstBuffer, plane);
            size_t planeRows = CVPixelBufferGetHeightOfPlane(srcBuffer, plane);
            size_t rowBytes = MIN(srcStride, dstStride);
            for (size_t row = 0; row < planeRows; row++) {
                memcpy(dst + row * dstStride, src + row * srcStride, rowBytes);
            }
        }
    }

    CVPixelBufferUnlockBaseAddress(dstBuffer, 0);
    CVPixelBufferUnlockBaseAddress(srcBuffer, kCVPixelBufferLock_ReadOnly);

    CMVideoFormatDescriptionRef formatDescription = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault,
                                                                   dstBuffer,
                                                                   &formatDescription);
    if (status != noErr) {
        CFRelease(dstBuffer);
        return NULL;
    }

    // Carry the source frame's timing over to the copy.
    CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
    CMSampleBufferGetSampleTimingInfo(sampleBuffer, 0, &timingInfo);

    CMSampleBufferRef outSample = NULL;
    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
                                                dstBuffer,
                                                true,
                                                NULL,
                                                NULL,
                                                formatDescription,
                                                &timingInfo,
                                                &outSample);
    CFRelease(formatDescription);
    CFRelease(dstBuffer);  // outSample (if created) retains the pixel buffer

    return (status == noErr) ? outSample : NULL;
}
YUV数据有个术语叫做stride(也叫linesize),表示每行像素数据实际占用的字节数。这个值并不一定等于图像的宽度,一般会比宽度大几个字节(行尾有对齐填充),YUV三个分量的平面都是如此。出现颜色偏差应该就是这个没有处理好——试试把memcpy改成按每个平面、一行一行地拷贝,看看效果
最后实验下来发现每行的字节数(stride)会被对齐到64的倍数(所以800宽的帧每行实际是832字节),图像格式是NV12,UV平面每行同样对齐到64的倍数。谢谢 @mayudong1
测试后发现当我把800宽的视频流当成每行832个像素来处理时,Y色块的值正常了.但UV色块始终会有偏差,实在是找不出为什么,请大神们指教