Creating a selectable video input patch for Quartz Composer: Muxed inputs fail

I am trying to create a custom patch for Quartz Composer that works like the Video Input patch, but with the capture device selectable through an input port. It is a small patch, and it works fine for me, but when I connect a DV device (a Canopus ADVC-110) and select it, the ColorSpace is (null) and I get an exception. It works fine with the FaceTime HD camera, which is a video media type. I must be missing something, but I cannot see it.

The captureOutput delegate method keeps firing as if new frames were arriving, and the capture appears to start fine. What am I missing? (A guard I am considering is sketched after the listing below.)

#import <OpenGL/CGLMacro.h> 
#import "CaptureWithDevice.h" 

#define kQCPlugIn_Name    @"Capture With Device" 
#define kQCPlugIn_Description  @"Serves as a replacement for the default Video Input patch, and differs in that it allows the input device to be specified by the user." 

@implementation CaptureWithDevice 
@dynamic inputDevice, outputImage; 

+ (NSDictionary*) attributes 
{ 
    return [NSDictionary dictionaryWithObjectsAndKeys: 
      kQCPlugIn_Name, QCPlugInAttributeNameKey, 
      kQCPlugIn_Description, QCPlugInAttributeDescriptionKey, 
      nil]; 
} 
+ (NSDictionary*) attributesForPropertyPortWithKey:(NSString*)key 
{  
    if([key isEqualToString:@"inputDevice"]) { 
     NSArray *videoDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]; 
     NSArray *muxedDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed]; 

     NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init ]; 
     [mutableArrayOfDevice addObjectsFromArray:videoDevices]; 
     [mutableArrayOfDevice addObjectsFromArray:muxedDevices]; 

     NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice]; 
     [mutableArrayOfDevice release]; 

     NSMutableArray *deviceNames= [NSMutableArray array]; 

     NSUInteger i, ic = [devices count]; 

     for (i = 0; i < ic; i++) { 
      [deviceNames addObject:[[devices objectAtIndex:i] description]]; 
      // be sure not to add CT to the list 
     } 



     return [NSDictionary dictionaryWithObjectsAndKeys: 
       @"Device", QCPortAttributeNameKey, 
       QCPortTypeIndex,QCPortAttributeTypeKey, 
       [NSNumber numberWithInt:0], QCPortAttributeMinimumValueKey, 
       deviceNames, QCPortAttributeMenuItemsKey, 
       [NSNumber numberWithInt:ic-1], QCPortAttributeMaximumValueKey, 
       nil]; 
    } 
    if([key isEqualToString:@"outputImage"]) 
     return [NSDictionary dictionaryWithObjectsAndKeys: 
       @"Video Image", QCPortAttributeNameKey, 
       nil]; 
    return nil; 
} 
+ (QCPlugInExecutionMode) executionMode 
{ 
    return kQCPlugInExecutionModeProvider; 
} 

+ (QCPlugInTimeMode) timeMode 
{ 
    return kQCPlugInTimeModeIdle; 
} 

- (id) init 
{ 
    if(self = [super init]) { 
     [[NSNotificationCenter defaultCenter] addObserver:self 
               selector:@selector(_devicesDidChange:) 
                name:QTCaptureDeviceWasConnectedNotification 
                object:nil]; 
     [[NSNotificationCenter defaultCenter] addObserver:self 
               selector:@selector(_devicesDidChange:) 
                name:QTCaptureDeviceWasDisconnectedNotification 
                object:nil]; 
    } 
    return self; 
} 

- (void) finalize 
{ 
    [super finalize]; 
} 

- (void) dealloc 
{ 
    if (mCaptureSession) { 
     [mCaptureSession stopRunning]; 
     [mCaptureDecompressedVideoOutput setDelegate:nil]; 
     [mCaptureSession release]; 
     [mCaptureDeviceInput release]; 
     [mCaptureDecompressedVideoOutput release]; 
    } 
    CVBufferRelease(mCurrentImageBuffer); // safe when NULL; frees the last retained frame 
    [[NSNotificationCenter defaultCenter] removeObserver:self]; 
    [super dealloc]; 
} 

@end 

@implementation CaptureWithDevice (Execution) 

- (BOOL) startExecution:(id<QCPlugInContext>)context 
{ 
    return YES; 
} 

- (void) enableExecution:(id<QCPlugInContext>)context 
{ 
} 
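// Release callback for the buffer-based image provider: balances the 
// CVPixelBufferLockBaseAddress and CVBufferRetain taken in -execute:... 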
static void _BufferReleaseCallback(const void* address, void* info) 
{ 
    CVPixelBufferUnlockBaseAddress(info, 0); 

    CVBufferRelease(info); 
} 
- (BOOL) execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary*)arguments 
{ 
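    // (Re)create the capture session lazily: on first execution, if the 
    // session has stopped, or when a new device is selected on the input port. 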
    if (!mCaptureSession || [mCaptureSession isRunning]==NO || _currentDevice!=self.inputDevice){ 
     NSError *error = nil; 
     BOOL success; 

     NSArray *videoDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]; 
     NSArray *muxedDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed]; 

     NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init ]; 
     [mutableArrayOfDevice addObjectsFromArray:videoDevices]; 
     [mutableArrayOfDevice addObjectsFromArray:muxedDevices]; 

     NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice]; 
     [mutableArrayOfDevice release]; 


     NSUInteger d= self.inputDevice; 
     if (d >= [devices count]) { 
      d = 0; // clamp an out-of-range selection to the first device 
     } 
     QTCaptureDevice *device = [devices objectAtIndex:d]; 
     success = [device open:&error]; 
     if (!success) { 
      NSLog(@"Could not open device %@", device); 
      self.outputImage = nil; 
      return YES; 
     } 
     NSLog(@"Opened device successfully"); 




     // stop and tear down any session left over from a previous device 
     [mCaptureSession stopRunning]; 
     [mCaptureSession release]; 
     mCaptureSession = [[QTCaptureSession alloc] init]; 

     [mCaptureDeviceInput release]; 
     mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device]; 

     // if the device is a muxed connection make sure to get the right connection 
     if ([muxedDevices containsObject:device]) { 
      NSLog(@"Disabling audio connections"); 
      NSArray *ownedConnections = [mCaptureDeviceInput connections]; 
      for (QTCaptureConnection *connection in ownedConnections) { 
       NSLog(@"MediaType: %@", [connection mediaType]); 
       if ([[connection mediaType] isEqualToString:QTMediaTypeSound]) { 
        [connection setEnabled:NO]; 
        NSLog(@"disabling audio connection"); 

       } 
      } 
     } 



     success = [mCaptureSession addInput:mCaptureDeviceInput error:&error]; 

     if (!success) { 
      NSLog(@"Failed to add Input"); 
      self.outputImage = nil; 
      if (mCaptureSession) { 
       [mCaptureSession release]; 
       mCaptureSession= nil; 
      } 
      if (mCaptureDeviceInput) { 
       [mCaptureDeviceInput release]; 
       mCaptureDeviceInput= nil; 

      } 
      return YES; 
     } 




     NSLog(@"Adding output"); 

     [mCaptureDecompressedVideoOutput release]; 
     mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init]; 

     [mCaptureDecompressedVideoOutput setPixelBufferAttributes: 
     [NSDictionary dictionaryWithObjectsAndKeys: 
      [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey, 
      [NSNumber numberWithLong:k32ARGBPixelFormat], kCVPixelBufferPixelFormatTypeKey, nil]]; 

     [mCaptureDecompressedVideoOutput setDelegate:self]; 
     success = [mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error]; 

     if (!success) { 
      NSLog(@"Failed to add output"); 
      self.outputImage = nil; 
      if (mCaptureSession) { 
       [mCaptureSession release]; 
       mCaptureSession= nil; 
      } 
      if (mCaptureDeviceInput) { 
       [mCaptureDeviceInput release]; 
       mCaptureDeviceInput= nil; 
      } 
      if (mCaptureDecompressedVideoOutput) { 
       [mCaptureDecompressedVideoOutput release]; 
       mCaptureDecompressedVideoOutput= nil; 
      } 
      return YES; 
     } 

     [mCaptureSession startRunning]; 
     _currentDevice= self.inputDevice; 
    } 


    CVImageBufferRef imageBuffer; 
    @synchronized (self) { 
     // take our own retain on the most recent frame delivered by the delegate 
     imageBuffer = CVBufferRetain(mCurrentImageBuffer); 
    } 

    if (imageBuffer) { 
     CVPixelBufferLockBaseAddress(imageBuffer, 0); 
     NSLog(@"ColorSpace: %@", CVImageBufferGetColorSpace(imageBuffer)); 
     //NSLog(@"ColorSpace: %@ Height: %@ Width: %@", CVImageBufferGetColorSpace(imageBuffer), CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer)); 
     id provider= [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8   
                   pixelsWide:CVPixelBufferGetWidth(imageBuffer) 
                   pixelsHigh:CVPixelBufferGetHeight(imageBuffer) 
                   baseAddress:CVPixelBufferGetBaseAddress(imageBuffer) 
                   bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer) 
                  releaseCallback:_BufferReleaseCallback 
                  releaseContext:imageBuffer 
                   colorSpace:CVImageBufferGetColorSpace(imageBuffer) 
                  shouldColorMatch:YES]; 
     if (provider == nil) { 
      // QC did not take the buffer: balance the lock and retain ourselves 
      CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 
      CVBufferRelease(imageBuffer); 
      return NO; 
     } 
     self.outputImage = provider; 
    } 
    else 
     self.outputImage = nil; 

    return YES; 
} 

- (void) disableExecution:(id<QCPlugInContext>)context 
{ 
} 
- (void) stopExecution:(id<QCPlugInContext>)context 
{ 
} 

- (void)captureOutput:(QTCaptureOutput *)captureOutput 
    didOutputVideoFrame:(CVImageBufferRef)videoFrame 
    withSampleBuffer:(QTSampleBuffer *)sampleBuffer 
     fromConnection:(QTCaptureConnection *)connection 
{  
    NSLog(@"connection type: %@", [connection mediaType]); 
    CVImageBufferRef imageBufferToRelease; 
    CVBufferRetain(videoFrame); 
    @synchronized (self) { 
     // swap the newest frame in; release the one it replaces outside the lock 
     imageBufferToRelease = mCurrentImageBuffer; 
     mCurrentImageBuffer = videoFrame; 
    } 
    CVBufferRelease(imageBufferToRelease); 
} 
- (void)_devicesDidChange:(NSNotification *)aNotification 
{ 
} 
@end 
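One direction I am considering, sketched here only (I am assuming, without having confirmed it, that the exception comes from the NULL color space reaching outputImageProviderFromBufferWithPixelFormat:...), is to substitute a generic RGB space when the buffer carries none, in place of the provider creation above:

CGColorSpaceRef colorSpace = CVImageBufferGetColorSpace(imageBuffer); 
CGColorSpaceRef fallback = NULL; 
if (colorSpace == NULL) { 
    // Muxed DV frames seem to arrive without an attached color space; 
    // hand QC a generic RGB space instead of NULL. 
    fallback = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB); 
    colorSpace = fallback; 
} 
id provider = [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8 
                  pixelsWide:CVPixelBufferGetWidth(imageBuffer) 
                  pixelsHigh:CVPixelBufferGetHeight(imageBuffer) 
                 baseAddress:CVPixelBufferGetBaseAddress(imageBuffer) 
                 bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer) 
               releaseCallback:_BufferReleaseCallback 
                releaseContext:imageBuffer 
                  colorSpace:colorSpace 
               shouldColorMatch:YES]; 
if (fallback) 
    CGColorSpaceRelease(fallback); // the provider holds its own reference 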

Answer

I managed to get this patch working with both Video and Muxed inputs by removing the kCVPixelBufferOpenGLCompatibilityKey entry from mCaptureDecompressedVideoOutput's pixel buffer attributes. While that lets the patch work flawlessly inside Quartz Composer, my intention is to run it in a composition used in CamTwist, which does not seem to need the OpenGL support. At the moment it simply displays a black screen with both Video and Muxed inputs, where it previously worked with Video inputs. So I am going to convert my CVImageBufferRef to an OpenGL texture and see if I can get it working with

outputImageProviderFromTextureWithPixelFormat:pixelsWide:pixelsHigh:name:flipped:releaseCallback:releaseContext:colorSpace:shouldColorMatch: 
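The conversion I have in mind looks roughly like this. It is an untested sketch: the GL_BGRA/GL_UNSIGNED_INT_8_8_8_8 pairing for the k32ARGBPixelFormat buffer, the QCPlugInPixelFormatBGRA8 constant for the resulting texture, and reusing the context's color space are all my assumptions.

static void _TextureReleaseCallback(CGLContextObj cgl_ctx, GLuint name, void* info) 
{ 
    // Called when Quartz Composer is done with the texture. 
    glDeleteTextures(1, &name); 
} 

// Inside -execute:..., with imageBuffer retained and its base address locked as before: 
CGLContextObj cgl_ctx = [context CGLContextObj]; // CGLMacro.h routes GL calls through this 
GLsizei width = (GLsizei)CVPixelBufferGetWidth(imageBuffer); 
GLsizei height = (GLsizei)CVPixelBufferGetHeight(imageBuffer); 

GLuint name; 
glGenTextures(1, &name); 
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, name); 
glPixelStorei(GL_UNPACK_ROW_LENGTH, (GLint)(CVPixelBufferGetBytesPerRow(imageBuffer) / 4)); 
// k32ARGBPixelFormat bytes (A,R,G,B) should match GL_BGRA + GL_UNSIGNED_INT_8_8_8_8 
// on little-endian Intel hardware (my assumption). 
glTexImage2D(GL_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8, width, height, 0, 
    GL_BGRA, GL_UNSIGNED_INT_8_8_8_8, CVPixelBufferGetBaseAddress(imageBuffer)); 
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); 
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, 0); 

// The texture now owns a copy of the pixels, so the buffer can go. 
CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 
CVBufferRelease(imageBuffer); 

id provider = [context outputImageProviderFromTextureWithPixelFormat:QCPlugInPixelFormatBGRA8 
                  pixelsWide:width 
                  pixelsHigh:height 
                     name:name 
                    flipped:NO // may need YES depending on frame orientation 
               releaseCallback:_TextureReleaseCallback 
                releaseContext:NULL 
                  colorSpace:[context colorSpace] 
               shouldColorMatch:YES]; 

If this works, it would also sidestep the NULL color space from the muxed device entirely, since the texture provider is handed the context's color space rather than the buffer's.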