
iOS - Cast local video to Chromecast using PhotoKit

I want to cast a device's local video to a Chromecast using the PhotoKit framework, but only the loading screen shows up on the Chromecast and no video plays. If I replace avUrlAsset.url.absoluteString with the http_url_of_video, it plays the video successfully.

Code:

let options = PHVideoRequestOptions()
options.isNetworkAccessAllowed = true
options.deliveryMode = .automatic

// create the metadata
let metadata = GCKMediaMetadata(metadataType: .movie)
metadata.setString("Title", forKey: kGCKMetadataKeyTitle)
metadata.setString("Subtitle", forKey: kGCKMetadataKeySubtitle)

PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

    if let avUrlAsset = avAsset as? AVURLAsset {

        // create the media information
        let mediaInfo = GCKMediaInformation(contentID: avUrlAsset.url.absoluteString,
                                            streamType: .buffered,
                                            contentType: "video/quicktime",
                                            metadata: metadata,
                                            streamDuration: 0,
                                            customData: nil)

        self._remotMediaClient?.loadMedia(mediaInfo, autoplay: true)
    }
})

Please suggest how I can play a local video on the Chromecast. I have also tried copying the video into the documents directory and passing the copied video's URL to the Chromecast, but that does not work either.


Hi Kirit, have you had any luck with this? I also want to make a simple app for my personal use where I can load videos via iTunes and cast them to my Chromecast. Let me know if you managed to do it. – Vineet


@Vineet We need to set up a local server to play a local video on the Chromecast –

Answer


I solved it using a local HTTP server.
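
The underlying issue: the URL that requestAVAsset returns for a Photos asset is a file:// URL inside the app's sandbox, and the Chromecast is a separate device on the network, so it can never fetch that URL; the file has to be served over HTTP instead. A quick check to confirm this, reusing the asset and options from the question:

PHImageManager.default().requestAVAsset(forVideo: asset, options: options) { avAsset, _, _ in
    if let avUrlAsset = avAsset as? AVURLAsset {
        // prints "true file:///var/mobile/Media/..." - a sandbox path
        // that no other device on the network can resolve
        print(avUrlAsset.url.isFileURL, avUrlAsset.url.absoluteString)
    }
}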

HttpServerManager.swift

import UIKit

class HttpServerManager: NSObject {

    static let shared = HttpServerManager()

    // HTTPServer comes from CocoaHTTPServer (here assumed to be exposed
    // through the project's Objective-C bridging header)
    private var httpServer: HTTPServer!

    override init() {
        super.init()

        // Create the server
        httpServer = HTTPServer()

        // Tell the server to broadcast its presence via Bonjour.
        // This allows browsers such as Safari to automatically discover our service.
        httpServer.setType("_http._tcp.")

        // Normally there's no need to run the server on any specific port:
        // technologies like Bonjour let clients discover the port at runtime.
        // However, for easy testing you may want to force a certain port,
        // e.g. httpServer.setPort(12345)

        // Serve files out of the app's documents directory
        let documentsDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first
        httpServer.setDocumentRoot(documentsDirectory)
    }

    func startServer() {
        // Start the server (and check for problems)
        do {
            try httpServer.start()
            DDLogWrapper.logInfo("Started HTTP Server on port \(httpServer.listeningPort())")
        } catch {
            DDLogWrapper.logError("Error starting HTTP Server: \(error)")
        }
    }

    func stopServer() {
        httpServer.stop()
    }

    func getListeningPort() -> UInt16 {
        return httpServer.listeningPort()
    }

    func setDocumentRoot(path string: String) {
        httpServer.setDocumentRoot(string)
    }
}
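
Usage from anywhere in the app is then a one-liner; for example (the temporary-directory root below is just an illustration):

// start serving and find out which port was picked
HttpServerManager.shared.startServer()
let port = HttpServerManager.shared.getListeningPort()

// optionally serve a different folder
HttpServerManager.shared.setDocumentRoot(path: NSTemporaryDirectory())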

Start the server in AppDelegate.swift:

class AppDelegate: UIResponder, UIApplicationDelegate, GCKLoggerDelegate {

    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {

        GCKLogger.sharedInstance().delegate = self

        // Configure the logging framework.
        // To keep things simple and fast, just log to the Xcode console.
        LoggerFactory.initLogging()

        // start the local HTTP server
        HttpServerManager.shared.startServer()

        return true
    }
}
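
iOS tears the listening socket down once the app is suspended, so playback from the phone only works while the app stays in the foreground. If you want to be explicit about it, here is a sketch of the matching lifecycle hooks (the stop/restart policy is only a suggestion, not part of the original answer):

func applicationDidEnterBackground(_ application: UIApplication) {
    // the socket would be torn down anyway once the app is suspended
    HttpServerManager.shared.stopServer()
}

func applicationWillEnterForeground(_ application: UIApplication) {
    // restart so the Chromecast can resume fetching the file
    HttpServerManager.shared.startServer()
}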

Play the local video on the Chromecast with the method below:

func playToRemotePlayer(with asset: PHAsset, forViewController viewController: UIViewController) {

    // if the video is paused, then resume it
    if _remotMediaClient?.mediaStatus?.playerState == .paused {
        _remotMediaClient?.play()
        return
    }

    // keep track of the most recent asset played on the Chromecast,
    // so re-selecting it skips the export step
    if recentPlayedAsset == nil {
        recentPlayedAsset = asset
    } else {
        if recentPlayedAsset == asset {
            self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)
            return
        } else {
            recentPlayedAsset = asset
        }
    }

    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true
    options.deliveryMode = .highQualityFormat
    options.version = .original

    // create the metadata
    let metadata = GCKMediaMetadata(metadataType: .movie)
    metadata.setString("your video title", forKey: kGCKMetadataKeyTitle)
    metadata.setString("your video subtitle", forKey: kGCKMetadataKeySubtitle)

    PHImageManager.default().requestAVAsset(forVideo: asset, options: options, resultHandler: { (avAsset, audioMix, info) in

        if (avAsset as? AVURLAsset) != nil {

            let startDate = NSDate()

            // create an export session to transcode the asset into the documents
            // directory, where the local HTTP server can see it
            let exportSession = AVAssetExportSession(asset: avAsset!, presetName: AVAssetExportPresetHighestQuality)

            // documentDirectoryUrl and CommanUtilites are helpers from the project
            let filePathURL = documentDirectoryUrl.appendingPathComponent("rendered_video.mp4")
            let filePath = NSURL(string: (filePathURL?.absoluteString)!)
            CommanUtilites.deleteFile(filePath: filePath!)

            exportSession!.outputURL = filePath as URL?
            exportSession!.outputFileType = AVFileTypeMPEG4
            exportSession!.shouldOptimizeForNetworkUse = true
            let start = CMTimeMakeWithSeconds(0.0, 600) // timescale must be non-zero
            let range = CMTimeRangeMake(start, (avAsset?.duration)!)
            exportSession?.timeRange = range

            print("Exporting Media...")

            DispatchQueue.main.async {
                self.progressHUD = MBProgressHUD.showAdded(to: viewController.view, animated: true)
                self.progressHUD?.mode = MBProgressHUDMode.indeterminate
                self.progressHUD?.label.text = "Exporting video, please wait..."
            }

            exportSession!.exportAsynchronously(completionHandler: { () -> Void in

                DispatchQueue.main.async {
                    self.progressHUD?.hide(animated: true)
                }

                switch exportSession!.status {

                case .failed:
                    print("Error: " + (exportSession?.error?.localizedDescription)!)
                case .cancelled:
                    print("Export canceled")
                case .completed:
                    // video conversion finished
                    let endDate = NSDate()
                    let time = endDate.timeIntervalSince(startDate as Date)
                    print(time)
                    print("Export Successful!")
                    print(exportSession?.outputURL?.path ?? "")

                    let port = String(HttpServerManager.shared.getListeningPort())
                    let videoHttpUrl = "http://127.0.0.1:" + port + "/rendered_video.mp4"

                    // create the media information pointing at the local HTTP server
                    self.recentMediaInfo = GCKMediaInformation(contentID: videoHttpUrl,
                                                               streamType: .buffered,
                                                               contentType: "video/mp4",
                                                               metadata: nil,
                                                               streamDuration: (avAsset?.duration.seconds)!,
                                                               customData: nil)

                    self._remotMediaClient?.loadMedia(self.recentMediaInfo!, autoplay: true)

                default:
                    break
                }
            })
        }
    })
}
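
One caveat on the URL above: the contentID is fetched by the Chromecast itself, not by the phone, so if the loopback address 127.0.0.1 does not work on your network, substitute the device's Wi-Fi address instead. A minimal sketch for reading it (this wifiIPAddress helper is not part of the project above, and en0 is assumed to be the Wi-Fi interface):

func wifiIPAddress() -> String? {
    var address: String?
    var ifaddr: UnsafeMutablePointer<ifaddrs>?
    guard getifaddrs(&ifaddr) == 0, let first = ifaddr else { return nil }
    defer { freeifaddrs(ifaddr) }

    for ptr in sequence(first: first, next: { $0.pointee.ifa_next }) {
        let interface = ptr.pointee
        // en0 is the Wi-Fi interface on iOS devices; AF_INET keeps this to IPv4
        guard interface.ifa_addr.pointee.sa_family == UInt8(AF_INET),
              String(cString: interface.ifa_name) == "en0" else { continue }

        var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
        getnameinfo(interface.ifa_addr, socklen_t(interface.ifa_addr.pointee.sa_len),
                    &hostname, socklen_t(hostname.count), nil, 0, NI_NUMERICHOST)
        address = String(cString: hostname)
    }
    return address
}

// e.g. let videoHttpUrl = "http://" + (wifiIPAddress() ?? "127.0.0.1") + ":" + port + "/rendered_video.mp4"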

Great, thanks Kirit, I will definitely try this! :) – Vineet