La meilleure façon de procéder est d'utiliser un objet AVCaptureSession. Je fais exactement ce dont vous parlez dans mon application gratuite "Live Effects Cam".
Il existe plusieurs exemples de code en ligne qui vous aideront à mettre en œuvre cela aussi. Voici un exemple de code qui pourrait vous aider:
/// Configures and starts an AVCaptureSession that delivers raw video frames
/// to -captureOutput:didOutputSampleBuffer:fromConnection: on a private
/// serial dispatch queue. Selects the front or back camera based on the
/// useFrontCamera ivar and records the matching UIImage orientation.
- (void) activateCameraFeed
{
    videoSettings = nil;
#if USE_32BGRA
    // Request 32BGRA output so the frame processing code can treat each
    // pixel as a uint32_t (see performImageCaptureFrom:).
    pixelFormatCode = [[NSNumber alloc] initWithUnsignedInt:(unsigned int)kCVPixelFormatType_32BGRA];
    pixelFormatKey = [[NSString alloc] initWithString:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    videoSettings = [[NSDictionary alloc] initWithObjectsAndKeys:pixelFormatCode, pixelFormatKey, nil];
#endif

    videoDataOutputQueue = dispatch_queue_create("com.jellyfilledstudios.ImageCaptureQueue", NULL);

    captureVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop frames instead of queueing them when processing falls behind.
    [captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [captureVideoOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    [captureVideoOutput setVideoSettings:videoSettings];
    [captureVideoOutput setMinFrameDuration:kCMTimeZero];

    // AVCaptureVideoDataOutput uses dispatch_retain()/dispatch_release(),
    // so we can release our reference to the queue now.
    dispatch_release(videoDataOutputQueue);

    if (useFrontCamera)
    {
        currentCameraDeviceIndex = frontCameraDeviceIndex;
        cameraImageOrientation = UIImageOrientationLeftMirrored;
    }
    else
    {
        currentCameraDeviceIndex = backCameraDeviceIndex;
        cameraImageOrientation = UIImageOrientationRight;
    }

    selectedCamera = [[AVCaptureDevice devices] objectAtIndex:(NSUInteger)currentCameraDeviceIndex];

    // Create the device input and bail out on failure instead of silently
    // adding a nil input to the session (e.g. camera in use, no permission).
    NSError *inputError = nil;
    captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:selectedCamera error:&inputError];
    if (captureVideoInput == nil)
    {
        NSLog(@"activateCameraFeed: could not create camera input: %@", inputError);
        return;
    }

    captureSession = [[AVCaptureSession alloc] init];

    [captureSession beginConfiguration];

    [self setCaptureConfiguration];

    // Guard each mutation: addInput:/addOutput: on an incompatible session
    // raises an exception, so check capability first.
    if ([captureSession canAddInput:captureVideoInput])
        [captureSession addInput:captureVideoInput];
    else
        NSLog(@"activateCameraFeed: session cannot accept the camera input");

    if ([captureSession canAddOutput:captureVideoOutput])
        [captureSession addOutput:captureVideoOutput];
    else
        NSLog(@"activateCameraFeed: session cannot accept the video data output");

    [captureSession commitConfiguration];

    [captureSession startRunning];
}
// AVCaptureVideoDataOutputSampleBufferDelegate
// AVCaptureAudioDataOutputSampleBufferDelegate
//
/// AVCaptureVideoDataOutputSampleBufferDelegate callback. Runs on the
/// private videoDataOutputQueue, NOT the main thread. Dispatches video
/// frames (but not audio) to the image-processing routine.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // @autoreleasepool is the modern, exception-safe replacement for a
    // manually drained NSAutoreleasePool and works under both MRC and ARC.
    // A pool is needed here because this queue has no default pool and
    // frames arrive continuously.
    @autoreleasepool
    {
        if (captureOutput == captureVideoOutput)
        {
            [self performImageCaptureFrom:sampleBuffer fromConnection:connection];
        }
    }
}
/// Copies one 32BGRA video frame out of the sample buffer, lets the caller's
/// effect functions mutate the copied pixels, wraps the result in a CGImage,
/// and hands it to the main thread for display.
///
/// NOTE(review): the call site above uses the selector
/// performImageCaptureFrom:fromConnection: — this definition takes only the
/// sample buffer. Presumably the two-argument variant exists elsewhere or
/// the connection argument was dropped; confirm against the full file.
///
/// @param sampleBuffer A single-sample CMSampleBuffer holding a
///                     kCVPixelFormatType_32BGRA pixel buffer. Frames in any
///                     other format are ignored.
- (void) performImageCaptureFrom:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer;

    // Only process well-formed, single-sample, ready buffers.
    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1)
        return;
    if (!CMSampleBufferIsValid(sampleBuffer))
        return;
    if (!CMSampleBufferDataIsReady(sampleBuffer))
        return;

    imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (CVPixelBufferGetPixelFormatType(imageBuffer) != kCVPixelFormatType_32BGRA)
        return;

    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // size_t, not int: bytesPerRow * height can exceed INT_MAX on large
    // frames, and the product is computed in size_t anyway.
    size_t bufferSize = bytesPerRow * height;

    // Work on a private copy so the driver's pixel buffer is never mutated
    // and can be unlocked/recycled independently of our processing.
    uint8_t *tempAddress = malloc(bufferSize);
    if (tempAddress == NULL)
    {
        // Allocation failure: release the lock and drop this frame.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return;
    }
    memcpy(tempAddress, baseAddress, bufferSize);
    baseAddress = tempAddress;

    //
    // Apply effects to the pixels stored in (uint32_t *)baseAddress
    //
    // example: grayScale((uint32_t *)baseAddress, width, height);
    // example: sepia((uint32_t *)baseAddress, width, height);
    //

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = NULL; // CF type: NULL, not nil
    if (cameraDeviceSetting != CameraDeviceSetting640x480) // not an iPhone4 or iTouch 5th gen
        newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst);
    else
        newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // CGBitmapContextCreateImage copies the pixel data, so the scratch
    // buffer and the pixel-buffer lock can be released immediately after.
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGColorSpaceRelease(colorSpace);
    CGContextRelease(newContext);

    free(tempAddress);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    if (newImage == nil)
    {
        return;
    }

    // We are on the capture queue here; UI work must happen on the main
    // thread. NOTE(review): newImage is a +1 CGImageRef — presumably
    // newCameraImageNotification: releases it; verify to avoid a leak.
    [self performSelectorOnMainThread:@selector(newCameraImageNotification:) withObject:(id)newImage waitUntilDone:YES];
}
J'ai essayé votre caméra d'effets en direct, elle a fière allure, et elle a beaucoup plus de fonctionnalités que celles que j'essayais d'implémenter. Bon travail ! Je suis juste surpris que ce soit gratuit. – BlueDolphin
Merci. Je reçois moins de 50 téléchargements par jour quand elle est à 99 cents et en moyenne plus de 1500 téléchargements par jour quand elle est gratuite. Je publie une mise à jour qui inclut l'achat intégré pour les nouvelles fonctionnalités les plus demandées. Je recommande l'approche « application gratuite avec achats intégrés » à quiconque développe une nouvelle application aujourd'hui. –