iOS: rotate and filter a live video stream in iOS

Hi there, I am rotating a live video stream and applying image filters to it with GPUImage. The processing takes more time than expected, and the iPhone overheats as a result. Can somebody help me optimize my code?

    - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        // Return if the sample buffer is invalid
        if (!CMSampleBufferIsValid(sampleBuffer)) {
            return;
        }

        // Get a CGImage from the sample buffer
        CGImageRef cgImageFromBuffer = [self cgImageFromSampleBuffer:sampleBuffer];
        if (cgImageFromBuffer == NULL) {
            return;
        }

        // We need to rotate the frame
        UIImage *rotatedPlainImage = [UIUtils rotateImage:[UIImage imageWithCGImage:cgImageFromBuffer] byDegree:90];
        if (rotatedPlainImage == nil) {
            CFRelease(cgImageFromBuffer);
            return;
        }

        // Apply the image filter to the CGImage using GPUImage
        CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];

        // Convert back into a CMSampleBuffer
        CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];

        // Pass to Red5Pro's custom encoder for the live stream
        [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];

        // Release everything we own
        CFRelease(outputBuffer);
        CFRelease(filteredCGImage);
        CFRelease(cgImageFromBuffer);
    }

    // Create a CGImageRef from sample buffer data
    - (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer

        // Get information about the image
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);

        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        /* CVBufferRelease(imageBuffer); */ // do not call this! The buffer is owned by the sample buffer.

        return newImage;
    }

    - (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CIImage *nm = [CIImage imageWithCGImage:imageRef];

        CVPixelBufferRef pixelBuffer;
        CVPixelBufferCreate(kCFAllocatorSystemDefault,
                            (size_t)nm.extent.size.width,
                            (size_t)nm.extent.size.height,
                            kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);

        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        CIContext *ciContext = [CIContext contextWithOptions:nil]; // created on every frame
        [ciContext render:nm toCVPixelBuffer:pixelBuffer];
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

        // Preserve the timing of the source sample buffer
        CMSampleTimingInfo sampleTime = {
            .duration = CMSampleBufferGetDuration(sampleBuffer),
            .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
            .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        };

        CMVideoFormatDescriptionRef videoInfo = NULL;
        CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);

        CMSampleBufferRef oBuf;
        CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);

        CVPixelBufferRelease(pixelBuffer);
        CFRelease(videoInfo);
        return oBuf;
    }
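
One immediately visible cost in the code above is that getSampleBufferUsingCIByCGInput creates a new CIContext and a fresh CVPixelBuffer on every frame; both are expensive objects that are normally created once and reused. Below is a minimal sketch of that idea, assuming a fixed 32BGRA frame size; the class name BufferRecycler and its methods are hypothetical, not part of the code above:

    #import <CoreImage/CoreImage.h>
    #import <CoreVideo/CoreVideo.h>

    @interface BufferRecycler : NSObject
    @property (nonatomic, readonly) CIContext *ciContext;
    - (instancetype)initWithWidth:(size_t)width height:(size_t)height;
    - (CVPixelBufferRef)newPixelBufferFromPool; // caller releases
    @end

    @implementation BufferRecycler {
        CVPixelBufferPoolRef _pool;
    }

    - (instancetype)initWithWidth:(size_t)width height:(size_t)height {
        if ((self = [super init]) != nil) {
            // Created once, reused for every frame
            _ciContext = [CIContext contextWithOptions:nil];
            NSDictionary *attrs = @{
                (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                (id)kCVPixelBufferWidthKey  : @(width),
                (id)kCVPixelBufferHeightKey : @(height),
                (id)kCVPixelBufferIOSurfacePropertiesKey : @{} // IOSurface backing avoids copies
            };
            CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef)attrs, &_pool);
        }
        return self;
    }

    - (CVPixelBufferRef)newPixelBufferFromPool {
        // Recycled from the pool when possible instead of allocated fresh
        CVPixelBufferRef buffer = NULL;
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, _pool, &buffer);
        return buffer;
    }

    - (void)dealloc {
        CVPixelBufferPoolRelease(_pool);
    }
    @end

With something like this in place, getSampleBufferUsingCIByCGInput would take the context and the output buffer from the recycler instead of rebuilding them, removing two allocations from the per-frame hot path.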

Update: I used OpenGL ES 2.0 and the Accelerate framework.

The Accelerate framework (vImage) rotates the CMSampleBuffer.

Without a filter, processing now takes 3 to 8 milliseconds per frame.

With filters it takes 7 to 21 milliseconds.

OpenGL renders the CIImage into the CVPixelBuffer quickly:

    @implementation ColorsVideoSource {
        CIContext *coreImageContext;
    }

    // Forward declaration of the pixel buffer release callback defined below
    void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress);

    - (instancetype)init {
        if ((self = [super init]) != nil) {
            // A GPU-backed Core Image context, created once and reused for every frame
            EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
            GLKView *glView = [[GLKView alloc] initWithFrame:CGRectMake(0.0, 0.0, 360.0, 480.0) context:glContext];
            coreImageContext = [CIContext contextWithEAGLContext:glView.context];
        }
        return self;
    }

    - (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        if (!CMSampleBufferIsValid(sampleBuffer)) {
            return;
        }

        CVPixelBufferRef rotateBuffer = [self correctBufferOrientation:sampleBuffer];
        CGImageRef cgImageFromBuffer = [self cgImageFromImageBuffer:rotateBuffer];
        if (cgImageFromBuffer == NULL) {
            CFRelease(rotateBuffer); // do not leak the rotated buffer on the early return
            return;
        }

        UIImage *rotatedPlainImage = [UIImage imageWithCGImage:cgImageFromBuffer];
        if (rotatedPlainImage == nil) {
            CFRelease(rotateBuffer);
            CFRelease(cgImageFromBuffer);
            return;
        }

        if (_currentFilterType == SWPublisherFilterNone) {
            if (_needPreviewImage) {
                _previewImage = rotatedPlainImage;
            }

            CMSampleTimingInfo sampleTime = {
                .duration = CMSampleBufferGetDuration(sampleBuffer),
                .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
                .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
            };

            CMVideoFormatDescriptionRef videoInfo = NULL;
            CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, &videoInfo);

            CMSampleBufferRef oBuf;
            CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
            CFRelease(videoInfo);

            if (!self.pauseEncoding) {
                @try {
                    [self.encoder encodeFrame:oBuf ofType:r5_media_type_video_custom];
                } @catch (NSException *exception) {
                    NSLog(@"Encoder error: %@", exception);
                }
            }
            CFRelease(oBuf);
        } else {
            CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
            if (_needPreviewImage) {
                _previewImage = [UIImage imageWithCGImage:filteredCGImage];
            }

            CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];

            if (!self.pauseEncoding) {
                @try {
                    [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
                } @catch (NSException *exception) {
                    NSLog(@"Encoder error: %@", exception);
                }
            }
            CFRelease(outputBuffer);
            CFRelease(filteredCGImage);
        }

        CFRelease(rotateBuffer);
        CFRelease(cgImageFromBuffer);
    }

    #pragma mark - Methods Refactored GPUImage - Devanshu

    - (CVPixelBufferRef)correctBufferOrientation:(CMSampleBufferRef)sampleBuffer {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        size_t currSize = bytesPerRow * height * sizeof(unsigned char);
        size_t bytesPerRowOut = 4 * height * sizeof(unsigned char); // the rotated image is `height` pixels wide

        void *srcBuff = CVPixelBufferGetBaseAddress(imageBuffer);

        /* rotationConstant:
         *  0 -- rotate 0 degrees (simply copy the data from src to dest)
         *  1 -- rotate 90 degrees counterclockwise
         *  2 -- rotate 180 degrees
         *  3 -- rotate 270 degrees counterclockwise
         */
        uint8_t rotationConstant = 3;

        unsigned char *dstBuff = (unsigned char *)malloc(currSize);
        vImage_Buffer inbuff = {srcBuff, height, width, bytesPerRow};
        vImage_Buffer outbuff = {dstBuff, width, height, bytesPerRowOut};

        uint8_t bgColor[4] = {0, 0, 0, 0};
        vImage_Error err = vImageRotate90_ARGB8888(&inbuff, &outbuff, rotationConstant, bgColor, 0);
        if (err != kvImageNoError) NSLog(@"%ld", err);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        // Width and height are swapped because the buffer was rotated by 90 degrees
        CVPixelBufferRef rotatedBuffer = NULL;
        CVPixelBufferCreateWithBytes(NULL, height, width, kCVPixelFormatType_32BGRA,
                                     outbuff.data, bytesPerRowOut,
                                     freePixelBufferDataAfterRelease, NULL, NULL, &rotatedBuffer);
        return rotatedBuffer;
    }

    void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress) {
        // Free the memory we malloced for the vImage rotation
        free((void *)baseAddress);
    }

    // Create a CGImageRef from sample buffer data
    - (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        return [self cgImageFromImageBuffer:imageBuffer];
    }

    // Create a CGImageRef from an image buffer
    - (CGImageRef)cgImageFromImageBuffer:(CVImageBufferRef)imageBuffer {
        CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer

        // Get information about the image
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);

        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return newImage;
    }

    - (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CIImage *theCoreImage = [CIImage imageWithCGImage:imageRef];

        CVPixelBufferRef pixelBuffer;
        CVPixelBufferCreate(kCFAllocatorSystemDefault,
                            (size_t)theCoreImage.extent.size.width,
                            (size_t)theCoreImage.extent.size.height,
                            kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);

        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        [coreImageContext render:theCoreImage toCVPixelBuffer:pixelBuffer]; // reuse the GPU-backed context
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

        CMSampleTimingInfo sampleTime = {
            .duration = CMSampleBufferGetDuration(sampleBuffer),
            .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
            .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        };

        CMVideoFormatDescriptionRef videoInfo = NULL;
        CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);

        CMSampleBufferRef oBuf;
        CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);

        CVPixelBufferRelease(pixelBuffer);
        CFRelease(videoInfo);
        return oBuf;
    }
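
If the selected filters have Core Image equivalents, the remaining CGImage/UIImage round trip could be dropped as well: wrap the rotated CVPixelBuffer in a CIImage, filter it, and render straight back into a pixel buffer with the cached coreImageContext. This is only a sketch of that route, not what the code above does; the helper name newFilteredPixelBuffer: is hypothetical, it assumes it lives inside the ColorsVideoSource implementation, and CISepiaTone merely stands in for whichever GPUImage filter is actually selected:

    // Hypothetical helper: filter the rotated pixel buffer directly with Core Image,
    // never materializing a CGImage/UIImage on the CPU. Caller releases the result.
    - (CVPixelBufferRef)newFilteredPixelBuffer:(CVPixelBufferRef)rotatedBuffer {
        CIImage *input = [CIImage imageWithCVPixelBuffer:rotatedBuffer];

        // CISepiaTone is just a stand-in for the selected filter
        CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
        [filter setValue:input forKey:kCIInputImageKey];
        [filter setValue:@0.8 forKey:kCIInputIntensityKey];

        CVPixelBufferRef output = NULL;
        CVPixelBufferCreate(kCFAllocatorDefault,
                            CVPixelBufferGetWidth(rotatedBuffer),
                            CVPixelBufferGetHeight(rotatedBuffer),
                            kCVPixelFormatType_32BGRA, NULL, &output);

        // Render on the GPU with the context created in -init
        [coreImageContext render:filter.outputImage toCVPixelBuffer:output];
        return output; // wrap in a CMSampleBuffer as above, then release
    }

The design point is the same as with the rotation: keep every frame as a CVPixelBuffer from capture to encoder, and let the GPU context do the per-pixel work.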