
JPEG streaming with live555

I want to stream JPEG images or a Motion-JPEG file with live555. The problem is that live555 does not ship a ready-made implementation for streaming JPEGs. Can anyone help?



Hopefully you have solved this already, but if not, see Jpeg Streaming using live555. It does exactly what you asked for: streaming images/JPEGs. For MJPEG you will have to go through the same process.
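
For reference, JPEGVideoSource (declared in live555's JPEGVideoSource.hh) is an abstract class: a subclass has to deliver one JPEG frame per doGetNextFrame() call and implement four accessors that JPEGVideoRTPSink reads when building the RTP/JPEG header (RFC 2435). A minimal skeleton could look like the following; the class name and the hard-coded values are placeholders, not part of live555:

#include "JPEGVideoSource.hh"

// Hypothetical skeleton showing only the contract a JPEG source must fulfil
class MyJPEGSource : public JPEGVideoSource
{
  public:
    virtual void doGetNextFrame()
    {
      // Copy the scan data of one JPEG frame into fTo (at most fMaxSize
      // bytes), set fFrameSize and fPresentationTime, then call:
      // afterGetting(this);
    }
    // Parameters used to build the RTP/JPEG header:
    virtual u_int8_t type()    { return 1; }     // 1 = YUV 4:2:0
    virtual u_int8_t qFactor() { return 128; }   // >= 128: q-tables sent in-band
    virtual u_int8_t width()   { return 640/8; } // in units of 8 pixels
    virtual u_int8_t height()  { return 480/8; }
  protected:
    MyJPEGSource(UsageEnvironment& env) : JPEGVideoSource(env) {}
};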


You can find an implementation that was posted on the live-devel mailing list: http://lists.live555.com/pipermail/live-devel/2012-February/014672.html. The code and a sample are available, but this modification was rejected by the live555 maintainer.

First, we need to implement an MJPEGVideoSource that can feed a JPEGVideoRTPSink.

#include <cstring>
#include "JPEGVideoSource.hh" 

class MJPEGVideoSource : public JPEGVideoSource 
{ 
     public: 
       static MJPEGVideoSource* createNew (UsageEnvironment& env, FramedSource* source) 
       { 
         return new MJPEGVideoSource(env,source); 
       } 
       virtual void doGetNextFrame() 
       { 
        if (m_inputSource) 
         m_inputSource->getNextFrame(fTo, fMaxSize, afterGettingFrameSub, this, FramedSource::handleClosure, this);      
       } 
       virtual void doStopGettingFrames() 
       { 
        FramedSource::doStopGettingFrames(); 
        if (m_inputSource) 
         m_inputSource->stopGettingFrames();      
       } 
       static void afterGettingFrameSub(void* clientData, unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds) 
       { 
           MJPEGVideoSource* source = (MJPEGVideoSource*)clientData; 
           source->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds); 
       }   
       // Parse the JPEG headers to extract the dimensions and quantization 
       // tables, then strip them: the RTP payload carries scan data only 
       void afterGettingFrame(unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds) 
       { 
        int headerSize = 0; 
        bool headerOk = false; 
        fFrameSize = 0; 

        for (unsigned int i = 0; i < frameSize ; ++i) 
        { 
         // SOF0: frame dimensions; RFC 2435 carries width/height in units 
         // of 8 pixels, hence the /8 folded into the shifts below 
         if ((i+8) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xC0) 
         { 
          m_height = (fTo[i+5]<<5)|(fTo[i+6]>>3); 
          m_width = (fTo[i+7]<<5)|(fTo[i+8]>>3); 
         } 
         // DQT: copy the 64-byte quantization tables (id 0 = luma, id 1 = chroma) 
         if ((i+5+64) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xDB) 
         { 
          if (fTo[i+4] ==0) 
          { 
           memcpy(m_qTable, fTo + i + 5, 64); 
           m_qTable0Init = true; 
          } 
          else if (fTo[i+4] ==1) 
          { 
           memcpy(m_qTable + 64, fTo + i + 5, 64); 
           m_qTable1Init = true; 
          } 
         } 
         // 0x3F 0x00 are the last two bytes of the SOS segment (Se=0x3F, 
         // Ah/Al=0); the entropy-coded scan data starts right after them 
         if ((i+1) < frameSize && fTo[i] == 0x3F && fTo[i+1] == 0x00) 
         { 
          headerOk = true; 
          headerSize = i+2; 
          break; 
         } 
        } 

        if (headerOk) 
        { 
         fFrameSize = frameSize - headerSize; 
         memmove(fTo, fTo + headerSize, fFrameSize); 
        } 

        fNumTruncatedBytes = numTruncatedBytes; 
        fPresentationTime = presentationTime; 
        fDurationInMicroseconds = durationInMicroseconds; 
        afterGetting(this); 
       } 
       // RFC 2435 parameters: type 1 = YUV 4:2:0; a qFactor >= 128 tells the 
       // sink that the quantization tables are supplied in-band 
       virtual u_int8_t type() { return 1; } 
       virtual u_int8_t qFactor() { return 128; } 
       virtual u_int8_t width() { return m_width; } 
       virtual u_int8_t height() { return m_height; } 
       u_int8_t const* quantizationTables(u_int8_t& precision, u_int16_t& length) 
       { 
        length = 0; 
        precision = 0; 
        if (m_qTable0Init && m_qTable1Init) 
        { 
         precision = 8; 
         length = sizeof(m_qTable); 
        } 
        return m_qTable;    
       } 

     protected: 
       MJPEGVideoSource(UsageEnvironment& env, FramedSource* source) : JPEGVideoSource(env), 
       m_inputSource(source), 
       m_width(0), 
       m_height(0), 
       m_qTable0Init(false), 
       m_qTable1Init(false) 
       { 
        memset(&m_qTable,0,sizeof(m_qTable)); 
       } 
       virtual ~MJPEGVideoSource() 
       { 
        Medium::close(m_inputSource); 
       } 

     protected: 
       FramedSource* m_inputSource; 
       u_int8_t  m_width; 
       u_int8_t  m_height; 
       u_int8_t  m_qTable[128]; 
       bool   m_qTable0Init; 
       bool   m_qTable1Init; 
}; 
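
The key idea in afterGettingFrame() is that the JPEG headers are parsed and then stripped, because the RTP/JPEG payload format (RFC 2435) transports only the scan data; the receiver regenerates the JPEG headers from type(), qFactor(), width(), height() and the tables returned by quantizationTables(). Note that this parser assumes baseline JPEG input with an SOF0 marker and at most two 8-bit quantization tables.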

We can then use it as the video source to build a simple RTSP server:

#include "liveMedia.hh" 
#include "BasicUsageEnvironment.hh" 
#include "GroupsockHelper.hh" 
#include "MJPEGVideoSource.hh" 

char const* inputFileName = "test.mjpeg"; 

int main(int argc, char** argv) { 
    // Begin by setting up our usage environment: 
    TaskScheduler* scheduler = BasicTaskScheduler::createNew(); 
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); 

    // Create 'groupsocks' for RTP and RTCP: 
    struct in_addr destinationAddress; 
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env); 

    const unsigned short rtpPortNum = 18888; 
    const unsigned short rtcpPortNum = rtpPortNum+1; 
    const unsigned char ttl = 255; 

    const Port rtpPort(rtpPortNum); 
    const Port rtcpPort(rtcpPortNum); 

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl); 
    rtpGroupsock.multicastSendOnly(); // we're a SSM source 
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl); 
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source 

    // Create a 'JPEG Video RTP' sink from the RTP 'groupsock': 
    RTPSink* videoSink = JPEGVideoRTPSink::createNew(*env, &rtpGroupsock); 

    // Create (and start) a 'RTCP instance' for this RTP sink: 
    const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share 
    const unsigned maxCNAMElen = 100; 
    unsigned char CNAME[maxCNAMElen+1]; 
    gethostname((char*)CNAME, maxCNAMElen); 
    CNAME[maxCNAMElen] = '\0'; // just in case 
    RTCPInstance* rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock, 
       estimatedSessionBandwidth, CNAME, 
       videoSink, NULL /* we're a server */, 
       True /* we're a SSM source */); 
    // Note: This starts RTCP running automatically 

    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554); 
    if (rtspServer == NULL) { 
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n"; 
        exit(1); 
    } 
    ServerMediaSession* sms = ServerMediaSession::createNew(*env, "testStream", inputFileName, 
        "Session streamed by \"testMJPEGVideoStreamer\"", True /*SSM*/); 
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp)); 
    rtspServer->addServerMediaSession(sms); 

    char* url = rtspServer->rtspURL(sms); 
    *env << "Play this stream using the URL \"" << url << "\"\n"; 
    delete[] url; 

    // Start the streaming: 
    *env << "Beginning streaming...\n"; 
    // Open the input file as a 'byte-stream file source': 
    ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(*env, inputFileName); 
    if (fileSource == NULL) { 
        *env << "Unable to open file \"" << inputFileName 
             << "\" as a byte-stream file source\n"; 
        exit(1); 
    } 

    // Create the MJPEG video source: 
    MJPEGVideoSource* videoSource = MJPEGVideoSource::createNew(*env, fileSource); 

    // Finally, start playing: 
    *env << "Beginning to read from file...\n"; 
    videoSink->startPlaying(*videoSource, NULL, NULL); 

    env->taskScheduler().doEventLoop(); 

    return 0; 
}
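
Assuming the program is linked against the usual live555 libraries (liveMedia, groupsock, BasicUsageEnvironment, UsageEnvironment), it prints the stream's rtsp:// URL at startup (here rtsp://<host>:8554/testStream), which you can open with openRTSP or VLC. The input file test.mjpeg is expected to be a raw concatenation of baseline JPEG frames.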