
Real-time face detection with the camera in Swift 3

How can I perform real-time face detection the way the Camera app does, with a round white shape drawn around and over the face? I am using AVCaptureSession. So far I have only managed to run face detection on an image I saved. My current code is attached below; it only captures an image when I press the button and saves it to the photo gallery. Could someone please help me draw a round shape in real time that follows the person's face?

Code

import UIKit
import AVFoundation

class CameraFaceRecongnitionVC: UIViewController {

    @IBOutlet weak var imgOverlay: UIImageView! 
    @IBOutlet weak var btnCapture: UIButton! 

    let captureSession = AVCaptureSession() 
    let stillImageOutput = AVCaptureStillImageOutput() 
    var previewLayer : AVCaptureVideoPreviewLayer? 

    // If we find a device we'll store it here for later use 
    var captureDevice : AVCaptureDevice? 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     // Custom styling extensions defined elsewhere in the project
     btnCapture.CameraButton()
     roundButton.RoundButtonForFaceRecong()

     // Do any additional setup after loading the view, typically from a nib. 
     captureSession.sessionPreset = AVCaptureSessionPresetHigh 

     if let devices = AVCaptureDevice.devices() as? [AVCaptureDevice] { 
      // Loop through all the capture devices on this phone 
      for device in devices { 
       // Make sure this particular device supports video 
       if (device.hasMediaType(AVMediaTypeVideo)) { 
      // Finally check the position and confirm we've got the front camera 
        if(device.position == AVCaptureDevicePosition.front) { 
         captureDevice = device 
         if captureDevice != nil { 
          print("Capture device found") 
          beginSession() 
         } 
        } 
       } 
      } 
     } 
    } 

    @IBAction func actionCameraCapture(_ sender: AnyObject) { 

     print("Camera button pressed") 
     saveToCamera() 
    } 

    func beginSession() { 

     do { 
      try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice)) 
      stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG] 

      if captureSession.canAddOutput(stillImageOutput) { 
       captureSession.addOutput(stillImageOutput) 
      } 

     } 
     catch { 
      print("error: \(error.localizedDescription)") 
     } 

     guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else { 
      print("no preview layer") 
      return 
     } 

     self.view.layer.addSublayer(previewLayer) 
     previewLayer.frame = self.view.layer.frame 
     captureSession.startRunning() 

     // self.view.addSubview(navigationBar) 
     self.view.addSubview(imgOverlay) 
     self.view.addSubview(btnCapture) 
    } 

    func saveToCamera() { 

     if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) { 

      stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
       if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) {

        if let cameraImage = UIImage(data: imageData) { 

         UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil) 
        } 
       } 
      }) 
     } 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
     // Dispose of any resources that can be recreated. 
    } 

} 



I found a solution using AVFoundation that draws a real-time face-tracking square on iOS. I modified some code from here.
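
Note: the capture session needs camera access, so on iOS 10 and later the app must also declare an NSCameraUsageDescription key in its Info.plist, or it will crash as soon as the camera is accessed.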

import UIKit 
import AVFoundation 

// Overlay view whose border is drawn around each detected face
class DetailsView: UIView {
    func setup() {
     layer.borderColor = UIColor.red.withAlphaComponent(0.7).cgColor
     layer.borderWidth = 5.0
    }
}


class ViewController: UIViewController { 

    let stillImageOutput = AVCaptureStillImageOutput()

    var session: AVCaptureSession?
    var borderLayer: CAShapeLayer?

    let detailsView: DetailsView = { 
     let detailsView = DetailsView() 
     detailsView.setup() 

     return detailsView 
    }() 

    lazy var previewLayer: AVCaptureVideoPreviewLayer? = { 
     var previewLay = AVCaptureVideoPreviewLayer(session: self.session!) 
     previewLay?.videoGravity = AVLayerVideoGravityResizeAspectFill 

     return previewLay 
    }() 

    lazy var frontCamera: AVCaptureDevice? = { 
     guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] else { return nil } 

     return devices.filter { $0.position == .front }.first 
    }() 

    // Core Image face detector; low accuracy keeps per-frame detection cheap
    let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: [CIDetectorAccuracy : CIDetectorAccuracyLow])

    override func viewDidLayoutSubviews() { 
     super.viewDidLayoutSubviews() 
     previewLayer?.frame = view.frame 
    } 

    override func viewDidAppear(_ animated: Bool) { 
     super.viewDidAppear(animated) 
     guard let previewLayer = previewLayer else { return } 

     view.layer.addSublayer(previewLayer) 
     view.addSubview(detailsView) 
     view.bringSubview(toFront: detailsView) 
    } 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     sessionPrepare() 
     session?.startRunning() 
    } 
    // Capture a still image and save it to the photo library
    func saveToCamera() {

     if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {

      stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
       if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) {

        if let cameraImage = UIImage(data: imageData) { 

         UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil) 
        } 
       } 
      }) 
     } 
    } 
} 

extension ViewController {

    // Configure the session with two outputs: a still-image output for saving
    // photos and a video data output whose frames feed the face detector
    func sessionPrepare() {
     session = AVCaptureSession()

     guard let session = session, let captureDevice = frontCamera else { return } 

     session.sessionPreset = AVCaptureSessionPresetPhoto 


     do { 
      let deviceInput = try AVCaptureDeviceInput(device: captureDevice) 
      session.beginConfiguration() 
      stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG] 

      if session.canAddOutput(stillImageOutput) { 
       session.addOutput(stillImageOutput) 
      } 

      if session.canAddInput(deviceInput) { 
       session.addInput(deviceInput) 
      } 

      let output = AVCaptureVideoDataOutput() 
      output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 

      output.alwaysDiscardsLateVideoFrames = true 

      if session.canAddOutput(output) { 
       session.addOutput(output) 
      } 

      session.commitConfiguration() 

      let queue = DispatchQueue(label: "output.queue") 
      output.setSampleBufferDelegate(self, queue: queue) 

     } catch { 
      print("error with creating AVCaptureDeviceInput") 
     } 
    } 
} 
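
// Every video frame is delivered to the delegate below: a CIDetector scans it
// for faces, and each detected face rectangle is converted into preview-layer
// coordinates before the overlay view is moved.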

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate { 
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
     guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
     let attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate)
     let ciImage = CIImage(cvImageBuffer: pixelBuffer, options: attachments as? [String : Any])
     let options: [String : Any] = [CIDetectorImageOrientation: exifOrientation(orientation: UIDevice.current.orientation), 
             CIDetectorSmile: true, 
             CIDetectorEyeBlink: true] 
     let allFeatures = faceDetector?.features(in: ciImage, options: options) 

     // The clean aperture describes the video's visible pixel dimensions
     guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else { return }
     let cleanAperture = CMVideoFormatDescriptionGetCleanAperture(formatDescription, false)

     guard let features = allFeatures else { return } 

     for feature in features { 
      if let faceFeature = feature as? CIFaceFeature { 
       let faceRect = calculateFaceRect(facePosition: faceFeature.mouthPosition, faceBounds: faceFeature.bounds, clearAperture: cleanAperture) 
       update(with: faceRect) 
      } 
     } 

     // Hide the overlay when no face is visible
     if features.isEmpty {
      DispatchQueue.main.async {
       self.detailsView.alpha = 0.0
      }
     }

    } 

    // Map the device orientation to the EXIF orientation value the CIDetector expects
    func exifOrientation(orientation: UIDeviceOrientation) -> Int {
     switch orientation { 
     case .portraitUpsideDown: 
      return 8 
     case .landscapeLeft: 
      return 3 
     case .landscapeRight: 
      return 1 
     default: 
      return 6 
     } 
    } 

    // Compute the visible video rectangle inside the preview layer, accounting
    // for the aspect-fill fit between the clean aperture and the view
    func videoBox(frameSize: CGSize, apertureSize: CGSize) -> CGRect {
     let apertureRatio = apertureSize.height/apertureSize.width 
     let viewRatio = frameSize.width/frameSize.height 

     var size = CGSize.zero 

     if (viewRatio > apertureRatio) { 
      size.width = frameSize.width 
      size.height = apertureSize.width * (frameSize.width/apertureSize.height) 
     } else { 
      size.width = apertureSize.height * (frameSize.height/apertureSize.width) 
      size.height = frameSize.height 
     } 

     var videoBox = CGRect(origin: .zero, size: size) 

     if (size.width < frameSize.width) { 
      videoBox.origin.x = (frameSize.width - size.width)/2.0 
     } else { 
      videoBox.origin.x = (size.width - frameSize.width)/2.0 
     } 

     if (size.height < frameSize.height) { 
      videoBox.origin.y = (frameSize.height - size.height)/2.0 
     } else { 
      videoBox.origin.y = (size.height - frameSize.height)/2.0 
     } 

     return videoBox 
    } 

    // Convert a face rectangle from image coordinates (landscape, origin at the
    // bottom-left) into the mirrored preview-layer coordinate space
    func calculateFaceRect(facePosition: CGPoint, faceBounds: CGRect, clearAperture: CGRect) -> CGRect {
     let parentFrameSize = previewLayer!.frame.size 
     let previewBox = videoBox(frameSize: parentFrameSize, apertureSize: clearAperture.size) 

     var faceRect = faceBounds 

     swap(&faceRect.size.width, &faceRect.size.height) 
     swap(&faceRect.origin.x, &faceRect.origin.y) 

     let widthScaleBy = previewBox.size.width/clearAperture.size.height 
     let heightScaleBy = previewBox.size.height/clearAperture.size.width 

     faceRect.size.width *= widthScaleBy 
     faceRect.size.height *= heightScaleBy 
     faceRect.origin.x *= widthScaleBy 
     faceRect.origin.y *= heightScaleBy 

     faceRect = faceRect.offsetBy(dx: 0.0, dy: previewBox.origin.y) 
     let frame = CGRect(x: parentFrameSize.width - faceRect.origin.x - faceRect.size.width - previewBox.origin.x/2.0, y: faceRect.origin.y, width: faceRect.width, height: faceRect.height) 

     return frame 
    } 

} 

extension ViewController {
    // Reveal the overlay and animate it onto the detected face on the main thread
    func update(with faceRect: CGRect) {
     DispatchQueue.main.async { 
      UIView.animate(withDuration: 0.2) { 
       self.detailsView.alpha = 1.0 
       self.detailsView.frame = faceRect 
      } 
     } 
    } 
}
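
The question asked for a round shape rather than a square. As a minimal sketch of one way to get that while keeping the tracking code above unchanged, you could swap DetailsView for a view that rounds its corners to half its side length (the RoundDetailsView name and the white border are my own choices here, not part of the original answer):

// Hypothetical drop-in replacement for DetailsView that renders a round border
class RoundDetailsView: UIView {

    func setup() {
        layer.borderColor = UIColor.white.withAlphaComponent(0.7).cgColor
        layer.borderWidth = 5.0
        layer.masksToBounds = true
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Half the shorter side gives a circle when the tracked face
        // rect is square, and a rounded capsule-like shape otherwise
        layer.cornerRadius = min(bounds.width, bounds.height) / 2.0
    }
}

Since update(with:) only animates alpha and frame, it works with this view as-is. Note also that AVCaptureStillImageOutput has been deprecated since iOS 10 (AVCapturePhotoOutput replaces it), and on iOS 11+ the Vision framework's VNDetectFaceRectanglesRequest is the modern alternative to CIDetector.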