
GPUImage - Tap to focus and tap to expose don't work properly - Am I missing something?

I had a starter project that used AVFoundation to set up the camera, and it worked perfectly. I then needed to convert the camera mechanism to GPUImage. I use the same focus and exposure method in both projects (it worked perfectly in the AVFoundation project), but in the GPUImage project it doesn't focus correctly and the point is always off.

Don't mind the filter being applied; it's the same in every case.

Example: at the top right of the screen you can see the lamb. That's where it focuses and exposes.


Setting up GPUImage:

stillCamera = GPUImageStillCamera(sessionPreset: AVCaptureSessionPreset640x480, cameraPosition: .Front)
CorrectPosition = AVCaptureDevicePosition.Front
stillCamera!.outputImageOrientation = .Portrait
stillCamera?.horizontallyMirrorFrontFacingCamera = true
filter = GPUImageFilter()                          // pass-through filter
stillCamera?.addTarget(filter)
filter?.addTarget(self.view as! GPUImageView)
// 2 == kGPUImageFillModePreserveAspectRatioAndFill
(self.view as! GPUImageView).fillMode = GPUImageFillModeType.init(2)

touchesBegan method:

override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    var tap = CGPointZero
    if let touch = touches.first {
        tap = touch.locationInView(self.view)   // point in the view's coordinate system
    }
    let device: AVCaptureDevice! = self.stillCamera?.inputCamera
    do {
        try device.lockForConfiguration()
        if device.focusPointOfInterestSupported && device.isFocusModeSupported(.AutoFocus) {
            device.focusMode = .AutoFocus
            device.focusPointOfInterest = tap
        }
        if device.exposurePointOfInterestSupported && device.isExposureModeSupported(.AutoExpose) {
            device.exposurePointOfInterest = tap
            device.exposureMode = .AutoExpose
        }
        device.subjectAreaChangeMonitoringEnabled = true
        device.unlockForConfiguration()
    } catch let error as NSError {
        print(error)
    }
}

Any ideas?

Answer:

The problem you're probably running into is that the x and y of device.focusPointOfInterest need to be in the [0;1] range, where (0,0) is the bottom-left corner of the camera and (1,1) is the top-right, whereas you are passing the tap coordinates in the view's coordinate system. For example, a tap at (160, 240) in a 320x480 view sets a point of interest of (160, 240), far outside the valid 0...1 range.

All you need to do is convert the tap coordinates into the camera's point coordinates. Note, however, that the camera can use different fill modes, and each one needs its own mapping.
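
As a warm-up, here is a minimal Swift sketch of that conversion for the simplest case only, assuming portrait orientation, a mirrored front camera, and stretch fill mode (this helper is hypothetical and just restates the math of the full implementation below):

func pointOfInterest(forTap tap: CGPoint, inView view: UIView, mirrored: Bool) -> CGPoint {
    var p = tap
    if mirrored {
        p.x = view.bounds.width - p.x      // undo the preview's horizontal mirroring
    }
    // Rotate from portrait view coordinates into the camera's landscape
    // space and normalize both axes to the [0;1] range.
    return CGPoint(x: p.y / view.bounds.height,
                   y: 1.0 - p.x / view.bounds.width)
}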

Here's how I do the full conversion (sorry for the Objective-C code, but it's mostly simple math):

CGPoint tapPoint = [gestureRecognizer locationInView:cameraView]; 

CGPoint pointOfInterest = [HBFocusUtils convertToPointOfInterestFromViewCoordinates:tapPoint inFrame:cameraView.bounds withOrientation:self.currentOrientation andFillMode:cameraView.fillMode mirrored:currentVideoCamera == frontVideoCamera]; 

[HBFocusUtils setFocus:pointOfInterest forDevice:currentVideoCamera.inputCamera]; 

and the implementation of those methods:

@implementation HBFocusUtils 

+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored 
{ 
    CGSize frameSize = frame.size; 
    CGPoint pointOfInterest = CGPointMake(0.5, 0.5); 

    if (mirrored) 
    { 
     viewCoordinates.x = frameSize.width - viewCoordinates.x; 
    } 

    if (fillMode == kGPUImageFillModeStretch) {
     // Stretch: the preview covers the whole view, so a plain rotation
     // into the camera's landscape space plus normalization is enough.
     pointOfInterest = CGPointMake(viewCoordinates.y/frameSize.height, 1.f - (viewCoordinates.x/frameSize.width)); 
    } else { 
     // The sensor is landscape, so the "aperture" is the view frame
     // with width and height swapped.
     CGSize apertureSize = CGSizeMake(CGRectGetHeight(frame), CGRectGetWidth(frame)); 
     if (!CGSizeEqualToSize(apertureSize, CGSizeZero)) { 
      CGPoint point = viewCoordinates; 
      CGFloat apertureRatio = apertureSize.height/apertureSize.width; 
      CGFloat viewRatio = frameSize.width/frameSize.height; 
      CGFloat xc = .5f; 
      CGFloat yc = .5f; 

      if (fillMode == kGPUImageFillModePreserveAspectRatio) {
       // Letterboxed preview: ignore taps on the black bars and
       // rescale the visible region to [0;1].
       if (viewRatio > apertureRatio) { 
        CGFloat y2 = frameSize.height; 
        CGFloat x2 = frameSize.height * apertureRatio; 
        CGFloat x1 = frameSize.width; 
        CGFloat blackBar = (x1 - x2)/2; 
        if (point.x >= blackBar && point.x <= blackBar + x2) { 
         xc = point.y/y2; 
         yc = 1.f - ((point.x - blackBar)/x2); 
        } 
       } else { 
        CGFloat y2 = frameSize.width/apertureRatio; 
        CGFloat y1 = frameSize.height; 
        CGFloat x2 = frameSize.width; 
        CGFloat blackBar = (y1 - y2)/2; 
        if (point.y >= blackBar && point.y <= blackBar + y2) { 
         xc = ((point.y - blackBar)/y2); 
         yc = 1.f - (point.x/x2); 
        } 
       } 
      } else if (fillMode == kGPUImageFillModePreserveAspectRatioAndFill) {
       // Cropped preview: account for the portion of the frame that
       // extends beyond the view's edges.
       if (viewRatio > apertureRatio) { 
        CGFloat y2 = apertureSize.width * (frameSize.width/apertureSize.height); 
        xc = (point.y + ((y2 - frameSize.height)/2.f))/y2; 
        yc = (frameSize.width - point.x)/frameSize.width; 
       } else { 
        CGFloat x2 = apertureSize.height * (frameSize.height/apertureSize.width); 
        yc = 1.f - ((point.x + ((x2 - frameSize.width)/2))/x2); 
        xc = point.y/frameSize.height; 
       } 
      } 

      pointOfInterest = CGPointMake(xc, yc); 
     } 
    } 

    return pointOfInterest; 
} 

+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device 
{ 
    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) 
    { 
     NSError *error; 
     if ([device lockForConfiguration:&error]) 
     { 
      [device setFocusPointOfInterest:focus]; 
      [device setFocusMode:AVCaptureFocusModeAutoFocus]; 
      [device unlockForConfiguration]; 
     } 
    } 

    if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose]) 
    { 
     NSError *error; 
     if ([device lockForConfiguration:&error]) 
     { 
      [device setExposurePointOfInterest:focus]; 
      [device setExposureMode:AVCaptureExposureModeAutoExpose]; 
      [device unlockForConfiguration]; 
     } 
    } 
} 

@end 
Comments:

"Hey! Thanks for your answer! I'll test it today. One question: from what I understood, I need to pick the right "formula" depending on my fill mode?"

"It works! Wonderful, thank you!"

"Thanks! This saved me hours of work." – thedeveloper3124

Answer:

Swift:

1) First, create the HBFocusUtils class in Objective-C.

2) Add #import "HBFocusUtils.h" to your bridging header.
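
For reference, the bridging header (its file name is project-specific; MyApp-Bridging-Header.h below is just a placeholder) only needs the one import:

// MyApp-Bridging-Header.h (hypothetical file name)
#import "HBFocusUtils.h"

With that in place, the class is callable from Swift: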

// Focus on tap
// ============
let tap = UITapGestureRecognizer(target: self, action: Selector("tapOnFocus:"))
tap.delegate = self
filterView.addGestureRecognizer(tap)

func tapOnFocus(gestureRecognizer: UITapGestureRecognizer? = nil)
{
    let tapPoint = (gestureRecognizer?.locationInView(filterView))! as CGPoint
    // GPUImageFillModeType.init(1) == kGPUImageFillModePreserveAspectRatio;
    // it must match the fill mode actually set on filterView.
    let pointOfInterest = HBFocusUtils.convertToPointOfInterestFromViewCoordinates(tapPoint,
        inFrame: filterView.bounds, withOrientation: .Portrait,
        andFillMode: GPUImageFillModeType.init(1), mirrored: true)
    HBFocusUtils.setFocus(pointOfInterest, forDevice: stillCamera.inputCamera)
}
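
A small defensive tweak (my suggestion, not part of the original answer): instead of hard-coding GPUImageFillModeType.init(1), read the fill mode straight from the preview view, so the converter and the view can never disagree:

let pointOfInterest = HBFocusUtils.convertToPointOfInterestFromViewCoordinates(tapPoint,
    inFrame: filterView.bounds, withOrientation: .Portrait,
    andFillMode: filterView.fillMode, mirrored: true)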

HBFocusUtils.h

#import <Foundation/Foundation.h> 
#import <UIKit/UIKit.h> 
#import <AVFoundation/AVFoundation.h> 
#import "GPUImageView.h" 

@interface HBFocusUtils : NSObject 

+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored; 
+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device; 
@end 


HBFocusUtils.m

(The contents of HBFocusUtils.m are the #import "HBFocusUtils.h" line followed by exactly the same convertToPointOfInterestFromViewCoordinates:... and setFocus:forDevice: implementations shown in the answer above, so they are not repeated here.)