
I'm having trouble recording a video using the code provided. I'm using sample code created for video recording, "Record a video with AVFoundation in Swift for iOS".

Specifically, I am unable to compile this line without getting the error: "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'"

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

This line is located inside an IBAction function:

@IBAction func RecordButtonPressed(_ sender: Any) { 

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

    var videoFileOutput = AVCaptureMovieFileOutput() 
    self.captureSession.addOutput(videoFileOutput) 

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filePath = documentsURL.appendingPathComponent("temp") 

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate) 

    RecordButton.setTitle("Stop", for: .normal); 

} 

The rest of the code is here:

import UIKit 
import AVFoundation 
import Darwin 




class ViewController: UIViewController { 



@IBOutlet weak var CameraView: UIImageView! 

@IBOutlet weak var RecordButton: UIButton! 

@IBOutlet weak var SelectFrButton: UIButton! 

@IBOutlet weak var ISOslider: UISlider! 

@IBOutlet weak var SSslider: UISlider! 

@IBOutlet weak var ISOtextfield: UITextField! 

@IBOutlet weak var SStextfield: UITextField! 

@IBOutlet weak var TorchSlider: UISlider! 

@IBOutlet weak var Torchtextfield: UITextField! 

var captureSession = AVCaptureSession(); 
var DisplaySessionOutput = AVCaptureVideoDataOutput(); 
var SaveSessionOutput = AVCaptureMovieFileOutput(); 
var previewLayer = AVCaptureVideoPreviewLayer(); 
var CaptureDevice:AVCaptureDevice? = nil; 
var CurrentTorchLevel:Float = 0.5; 


override func viewDidLoad() { 
    super.viewDidLoad() 

    captureSession.sessionPreset = AVCaptureSessionPresetHigh 
    // Loop through all the capture devices on this phone 

    let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified) 

    for device in (deviceDiscoverySession?.devices)! { 
     if(device.position == AVCaptureDevicePosition.back){ 
      do{ 

       try device.lockForConfiguration() 


       device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in 

        // Set text and sliders to correct levels 
        self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!; 
        self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!; 

        self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!); 
        self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!); 

        self.ISOtextfield.text = device.iso.description; 
        self.ISOslider.setValue(device.iso, animated: false) 

        self.SStextfield.text = device.exposureDuration.seconds.description; 
        self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false); 

        self.TorchSlider.minimumValue = 0.01; 
        self.TorchSlider.maximumValue = 1; 
        self.TorchSlider.value = 0.5; 
        self.Torchtextfield.text = "0.5"; 
       }) 




       //Turn torch on 

       if (device.torchMode == AVCaptureTorchMode.on) { 
        device.torchMode = AVCaptureTorchMode.off 
       } else { 
        try device.setTorchModeOnWithLevel(1.0) 

       } 

       device.unlockForConfiguration(); 

       CaptureDevice = device; 

       let input = try AVCaptureDeviceInput(device: CaptureDevice) 
       if(captureSession.canAddInput(input)){ 
        captureSession.addInput(input); 

        if(captureSession.canAddOutput(DisplaySessionOutput)){ 
         captureSession.addOutput(DisplaySessionOutput); 
         previewLayer = AVCaptureVideoPreviewLayer(session: captureSession); 
         previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 
         previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait; 
         CameraView.layer.addSublayer(previewLayer); 
        } 
       } 
      } 
      catch{ 
       print("exception!"); 
      } 
     } 
    } 

    CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1); 

    captureSession.startRunning() 


} 

    // Do any additional setup after loading the view, typically from a nib. 


override func viewDidLayoutSubviews() { 

    previewLayer.frame = CameraView.bounds 

} 


override func didReceiveMemoryWarning() { 
    super.didReceiveMemoryWarning() 
    // Dispose of any resources that can be recreated. 
} 


@IBAction func RecordButtonPressed(_ sender: Any) { 

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

    var videoFileOutput = AVCaptureMovieFileOutput() 
    self.captureSession.addOutput(videoFileOutput) 

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filePath = documentsURL.appendingPathComponent("temp") 

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate) 

    RecordButton.setTitle("Stop", for: .normal); 

} 

@IBAction func ISOvaluechanged(_ sender: Any) { 

    SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel) 
} 

@IBAction func SSvaluechanged(_ sender: Any) { 

    let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000); 
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel) 
} 

@IBAction func ISOtextchanged(_ sender: Any) { 

} 

@IBAction func SStextchanged(_ sender: Any) { 

    //let time = CMTimeMake(Int64(exposurelevel * 100000),100000); 

} 


@IBAction func ChooseButtonPressed(_ sender: Any) { 
} 

func ShowAlert(AlertMessage: String) { 

    let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert) 

    self.present(alertController, animated: true, completion:nil) 

    let OKAction = UIAlertAction(title: "OK", style: .default) { (action:UIAlertAction) in 
    } 

    alertController.addAction(OKAction) 

} 

@IBAction func TorchSliderChanged(_ sender: Any) { 

    CurrentTorchLevel = self.TorchSlider.value; 
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel); 
} 

func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) { 

    var newISOval = isolevel; 
    var newSSval = exposurelevel; 
    let newTorchVal = TorchLevel; 

    if(newISOval == FLT_MAX){ 
     // Pass through 0,0 for maintaining current SS. 
    } 

    else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) { 

     newISOval = (self.CaptureDevice?.activeFormat.maxISO)!; 
    } 

    else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) { 

     newISOval = (self.CaptureDevice?.activeFormat.minISO)!; 
    } 

    if(newSSval.timescale == 0){ 
     // Pass through 0,0 for maintaining current SS. 
    } 

    else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) { 

     newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!; 
    } 

    else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) { 

     newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!; 
    } 



     do { 

     try self.CaptureDevice?.lockForConfiguration(); 

     try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal); 

     CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in 

      // Set text and sliders to correct levels 
      self.ISOtextfield.text = self.CaptureDevice?.iso.description; 
      self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false) 

      self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description; 
      self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false); 

      self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false); 
      self.Torchtextfield.text = self.CurrentTorchLevel.description; 

     }) 

     self.CaptureDevice?.unlockForConfiguration(); 

    } 

    catch { 
     ShowAlert(AlertMessage: "Unable to set camera settings"); 
     self.CaptureDevice?.unlockForConfiguration(); 


    } 

} 

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) { 
    return 
} 

func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) { 
    return 
} 

} 

Thank you for any help you can provide!

Answer

Create an extension for your UIViewController to make it conform to AVCaptureFileOutputRecordingDelegate. Remove the final two methods from your ViewController class and add them to the extension instead.

class ViewController: UIViewController {
    // Your methods as usual, but remove the final two methods and add them to the
    // extension that follows. Those methods are what will make you conform to
    // AVCaptureFileOutputRecordingDelegate.
}

extension ViewController: AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

    }
}

You can do the same thing by extending your UIViewController directly, as below, but I thought I'd give you the clean solution above. You can choose either.

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
    // Your methods as usual, but you keep your final two methods this time.

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

    }
}
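With either approach, the assignment from the question should now compile, since 'self' is a valid AVCaptureFileOutputRecordingDelegate. As a minimal sketch of the recording call under that assumption (the startMovieRecording name and the "temp.mov" file name are illustrative, not from the question):

// Inside ViewController, once it conforms to AVCaptureFileOutputRecordingDelegate.
func startMovieRecording() {
    let videoFileOutput = AVCaptureMovieFileOutput()

    // Guard against adding the same output twice across button presses.
    if captureSession.canAddOutput(videoFileOutput) {
        captureSession.addOutput(videoFileOutput)
    }

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let fileURL = documentsURL.appendingPathComponent("temp.mov") // illustrative file name

    // 'self' now satisfies the delegate parameter without a conversion error.
    videoFileOutput.startRecording(toOutputFileURL: fileURL, recordingDelegate: self)
}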

Hi, thanks for the help. I tried the second solution but got the error: "Type 'ViewController' does not conform to protocol 'AVCaptureFileOutputRecordingDelegate'" – aforward


To conform to AVCaptureFileOutputRecordingDelegate you need the two captureOutput methods in ViewController. I've updated my solution to include them more clearly. Do you still have them in there? The first error you got was because ViewController did not adopt AVCaptureFileOutputRecordingDelegate, so when you used 'self' in your @IBAction, Xcode was trying to convert a UIViewController into an AVCaptureFileOutputRecordingDelegate. – gwinyai


Got it. The methods changed in Swift 3.0: https://developer.apple.com/reference/avfoundation/avcapturefileoutputrecordingdelegate (I suggest updating your answer for future troubleshooting.) Thanks again for your help! – aforward
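For future readers: the "does not conform" error in the comments came from the delegate methods being renamed in Swift 3. A sketch of the old versus new spellings under the Swift 3 / iOS 10 SDK used throughout this question (signatures per the Apple reference linked above):

import AVFoundation

// Swift 2.x-era signatures, as implemented in the question's ViewController.
// Under the Swift 3 SDK these names no longer match the protocol, so the
// class fails to conform:
//
// func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!)
// func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!)

// Swift 3 signatures that AVCaptureFileOutputRecordingDelegate requires:
extension ViewController: AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        print("Recording started: \(fileURL)")
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("Recording finished: \(outputFileURL)")
    }
}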