Recording videos with real-time filters in Swift

I'm new to Swift and am trying to build a camera app that can apply filters in real time and record video with the filters applied. So far I can preview the camera feed in real time with the filters applied, but when I save the video, everything is black.

import UIKit 
import AVFoundation 
import AssetsLibrary 
import CoreMedia 
import Photos 

class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate { 

    var captureSession: AVCaptureSession! 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet weak var recordButtton: UIButton! 
    @IBOutlet weak var imageView: UIImageView! 

    var assetWriter: AVAssetWriter? 
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor? 
    var isWriting = false 
    var currentSampleTime: CMTime? 
    var currentVideoDimensions: CMVideoDimensions? 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     FilterVendor.register() 
     setupCaptureSession() 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
    } 

    func setupCaptureSession() { 
     captureSession = AVCaptureSession() 
     captureSession.sessionPreset = AVCaptureSessionPresetPhoto 

     guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else { 
      print("Can't access the camera") 
      return 
     } 

     if captureSession.canAddInput(input) { 
      captureSession.addInput(input) 
     } 

     let videoOutput = AVCaptureVideoDataOutput() 

     videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main) 
     if captureSession.canAddOutput(videoOutput) { 
      captureSession.addOutput(videoOutput) 
     } 

     if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) { 
      view.layer.addSublayer(previewLayer) 
     } 

     captureSession.startRunning() 
    } 

    @IBAction func record(_ sender: Any) { 
     if isWriting { 
      print("stop record") 
      self.isWriting = false 
      assetWriterPixelBufferInput = nil 
      assetWriter?.finishWriting(completionHandler: {[unowned self]() -> Void in 
       self.saveMovieToCameraRoll() 
      }) 
     } else { 
      print("start record") 
      createWriter() 
      assetWriter?.startWriting() 
      assetWriter?.startSession(atSourceTime: currentSampleTime!) 
      isWriting = true 
     } 
    } 

    func saveMovieToCameraRoll() { 
     PHPhotoLibrary.shared().performChanges({ 
      PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL) 
     }) { saved, error in 
      if saved { 
       print("saved") 
      } 
     } 
    } 

    func movieURL() -> NSURL { 
     let tempDir = NSTemporaryDirectory() 
     let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov") 
     return url! as NSURL 
    } 

    func checkForAndDeleteFile() { 
     let fm = FileManager.default 
     let url = movieURL() 
     let exist = fm.fileExists(atPath: url.path!) 

     if exist { 
      do { 
       try fm.removeItem(at: url as URL) 
      } catch let error as NSError { 
       print(error.localizedDescription) 
      } 
     } 
    } 

    func createWriter() { 
     self.checkForAndDeleteFile() 

     do { 
      assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie) 
     } catch let error as NSError { 
      print(error.localizedDescription) 
      return 
     } 

     let outputSettings = [ 
      AVVideoCodecKey : AVVideoCodecH264, 
      AVVideoWidthKey : Int(currentVideoDimensions!.width), 
      AVVideoHeightKey : Int(currentVideoDimensions!.height) 
     ] as [String : Any] 

     let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings) 
     assetWriterVideoInput.expectsMediaDataInRealTime = true 
     assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI/2.0)) 

     let sourcePixelBufferAttributesDictionary = [ 
      String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA), 
      String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width), 
      String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height), 
      String(kCVPixelBufferOpenGLESCompatibilityKey) : kCFBooleanTrue 
     ] as [String : Any] 

     assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, 
                      sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) 

     if assetWriter!.canAdd(assetWriterVideoInput) { 
      assetWriter!.add(assetWriterVideoInput) 
     } else { 
      print("no way\(assetWriterVideoInput)") 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) { 
     autoreleasepool { 

      connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft; 

      guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 
      let cameraImage = CIImage(cvPixelBuffer: pixelBuffer) 

      let filter = CIFilter(name: "Fİlter")! 
      filter.setValue(cameraImage, forKey: kCIInputImageKey) 


      let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)! 
      self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 
      self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) 

      if self.isWriting { 
       if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
        var newPixelBuffer: CVPixelBuffer? = nil 

        CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

        let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!) 

        if success == false { 
         print("Pixel Buffer failed") 
        } 
       } 
      } 

      DispatchQueue.main.async { 

       if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
        let filteredImage = UIImage(ciImage: outputValue) 
        self.imageView.image = filteredImage 
       } 
      } 
     } 
    } 
} 

Have you tried saving a video without the filter? – Simon


@Simon no difference :( – hackio

Answer

I've added some comments to the critical part below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) { 
    autoreleasepool { 

     connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft; 

     // COMMENT: This line makes sense - this is your pixelbuffer from the camera. 
     guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 

     // COMMENT: OK, so you turn pixelBuffer into a CIImage... 
     let cameraImage = CIImage(cvPixelBuffer: pixelBuffer) 

     // COMMENT: And now you've set up a CIFilter with the camera image as its input... 
     let filter = CIFilter(name: "Fİlter")! 
     filter.setValue(cameraImage, forKey: kCIInputImageKey) 


     let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)! 
     self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 
     self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) 

     if self.isWriting { 
      if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
       // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write... 
       var newPixelBuffer: CVPixelBuffer? = nil 

       // COMMENT: And you grabbed memory from the pool. 
       CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

       // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame. 
       let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!) 

       if success == false { 
        print("Pixel Buffer failed") 
       } 
      } 
     } 

     // COMMENT: And now you're sending the filtered image back to the screen. 
     DispatchQueue.main.async { 

      if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
       let filteredImage = UIImage(ciImage: outputValue) 
       self.imageView.image = filteredImage 
      } 
     } 
    } 
} 

It seems to me that you're essentially getting the image to the screen, creating a filtered copy of it, then making a NEW pixel buffer that is empty and writing that out.

If you write out the pixelBuffer you grabbed instead of the new one you're creating, you should write the image successfully, though without the filter applied. What you need in order to write the filtered video is a way to create a new CVPixelBuffer from a CIImage - that solution already exists on StackOverflow, I know because I needed that step myself!
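As a rough illustration, here is a minimal sketch of that last step: rendering the filter's output into the pooled buffer before appending it. It assumes your custom filter produces an output image, and it introduces a ciContext property (not in your code) for the rendering - create the CIContext once, since building one per frame is expensive:

// Created once, e.g. as a property on the view controller - 
// building a CIContext per frame is expensive. 
let ciContext = CIContext() 

// Inside captureOutput, replacing the block that appends the empty buffer: 
if self.isWriting, 
    self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true, 
    let pool = self.assetWriterPixelBufferInput?.pixelBufferPool, 
    let filteredImage = filter.outputImage { 

    var newPixelBuffer: CVPixelBuffer? = nil 
    CVPixelBufferPoolCreatePixelBuffer(nil, pool, &newPixelBuffer) 

    if let outputBuffer = newPixelBuffer { 
        // Render the filtered CIImage into the pooled buffer 
        // instead of leaving it empty. 
        self.ciContext.render(filteredImage, to: outputBuffer) 

        let success = self.assetWriterPixelBufferInput?.append(outputBuffer, withPresentationTime: self.currentSampleTime!) 

        if success == false { 
            print("Pixel Buffer failed") 
        } 
    } 
} 

Note that CIContext.render(_:to:) writes directly into the pixel buffer, so there is no need to lock the buffer's base address yourself.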

+0

Not Swift based, but here's the ObjectiveC code from a few years back now https://stackoverflow.com/questions/22819337/adding-filters-to-video-with-avfoundation-osx-how-do-i-write-the-result-i – Tim


Thanks Tim :) – hackio