[Live-devel] RTSP (RTP) -> H.264ES -> MPEG2-TS muxing problem

Umar Qureshey umar at janteq.com
Wed Nov 13 15:16:15 PST 2013


Hi,
I am attempting to grab an H.264 elementary stream (ES) from a streaming IP camera, mux it into an MPEG-2 transport stream (TS), and save it to a file. I have modified the testRTSPClient.cpp demo program to accomplish this.
Basically, I take the H.264 ES delivered by the RTP subsession, pass it through an H264VideoStreamDiscreteFramer filter, feed that into an MPEG2TransportStreamFromESSource filter, and send the muxer's output to a FileSink. The program runs fine and creates a TS file, but I am unable to play it in VLC, and my TS analyzer reports bad video data.
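
In other words, the intended filter chain (condensed from my modified continueAfterSETUP() below; the names and the output path match the code that follows) is:

    // H.264 ES from the RTP subsession -> discrete framer -> TS muxer -> file on disk
    FramedSource* es = scs.subsession->readSource();
    H264VideoStreamDiscreteFramer* framer =
        H264VideoStreamDiscreteFramer::createNew(env, es);
    MPEG2TransportStreamFromESSource* tsMuxer =
        MPEG2TransportStreamFromESSource::createNew(env);
    tsMuxer->addNewVideoSource(framer, 5);   // mpegVersion 5 => H.264
    FileSink* sink = FileSink::createNew(env, "/tmp/live555.ts", 2000000);
    sink->startPlaying(*tsMuxer, subsessionAfterPlaying, scs.subsession);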

Here are the relevant changes I have made to testRTSPClient.cpp (my modifications are marked with "UQ" comments). Most of the action is in continueAfterSETUP(); I have posted only the functions I modified, but the full patch is also pasted at the end:

class ourRTSPClient: public RTSPClient {
public:
    static ourRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL,
				  int verbosityLevel = 0,
				  char const* applicationName = NULL,
				  portNumBits tunnelOverHTTPPortNum = 0);

protected:
    ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
		int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum);
    // called only by createNew();
    virtual ~ourRTSPClient();

public:
    StreamClientState scs;
    //UQ 11/13/2013  Declare H.264-ES to MPEG2-TS converter.
    MPEG2TransportStreamFromESSource *outputTSStream;
    //UQ 11/13/2013  Declare H.264-ES Framer.
    H264VideoStreamDiscreteFramer* H264framer;
};

void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) {
  do {
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    if (resultCode != 0) {
      env << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString << "\n";
      break;
    }

    env << *rtspClient << "Set up the \"" << *scs.subsession
	<< "\" subsession (client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1 << ")\n";

    // Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it.
    // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
    // after we've sent a RTSP "PLAY" command.)

//UQ 11/12/2013  Replace with FileSink.
#if 0        
    scs.subsession->sink = DummySink::createNew(env, *scs.subsession, rtspClient->url());
#endif    
    scs.subsession->sink = FileSink::createNew(env, "/tmp/live555.ts", 2000000);

      // perhaps use your own custom "MediaSink" subclass instead
    if (scs.subsession->sink == NULL) {
      env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
	  << "\" subsession: " << env.getResultMsg() << "\n";
      break;
    }

    env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n";
    scs.subsession->miscPtr = rtspClient; // a hack to let subsession handle functions get the "RTSPClient" from the subsession     
    env << "Video source is " << scs.subsession->mediumName() << "\n";
    
    ((ourRTSPClient*)rtspClient)->H264framer = H264VideoStreamDiscreteFramer::createNew(env, scs.subsession->readSource());
    
    //UQ 11/13/2013  Add H.264 Video source to MPEG2-TS converter.
    ((ourRTSPClient*)rtspClient)->outputTSStream->addNewVideoSource(((ourRTSPClient*)rtspClient)->H264framer, 5); // mpegVersion 5 => H.264
    
//UQ 11/13/2013  We don't want to save ES data so comment this out.
#if 0
    scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
				       subsessionAfterPlaying, scs.subsession);
#endif
    //UQ 11/13/2013  Save the muxed TS file.
    scs.subsession->sink->startPlaying(*((ourRTSPClient*)rtspClient)->outputTSStream,
                                       subsessionAfterPlaying, scs.subsession);
    // Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
    if (scs.subsession->rtcpInstance() != NULL) {
      scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession);
    }
  } while (0);
  delete[] resultString;

  // Set up the next subsession, if any:
  setupNextSubsession(rtspClient);
}

void subsessionAfterPlaying(void* clientData) {
  MediaSubsession* subsession = (MediaSubsession*)clientData;
  RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);

  // Begin by closing this subsession's stream:
  Medium::close(subsession->sink);
  subsession->sink = NULL;
  
  //UQ 11/13/2013  Release H264framer resources.
  Medium::close(((ourRTSPClient*)rtspClient)->H264framer);

  // Next, check whether *all* subsessions' streams have now been closed:
  MediaSession& session = subsession->parentSession();
  MediaSubsessionIterator iter(session);
  while ((subsession = iter.next()) != NULL) {
    if (subsession->sink != NULL) return; // this subsession is still active
  }

  // All subsessions' streams have now been closed, so shutdown the client:
  shutdownStream(rtspClient);
}

void shutdownStream(RTSPClient* rtspClient, int exitCode) {
  UsageEnvironment& env = rtspClient->envir(); // alias
  StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

  // First, check whether any subsessions have still to be closed:
  if (scs.session != NULL) { 
    Boolean someSubsessionsWereActive = False;
    MediaSubsessionIterator iter(*scs.session);
    MediaSubsession* subsession;

    while ((subsession = iter.next()) != NULL) {
      if (subsession->sink != NULL) {
	Medium::close(subsession->sink);
	subsession->sink = NULL;
        //UQ 11/13/2013  Release H264framer resources.
        Medium::close(((ourRTSPClient*)rtspClient)->H264framer);

	if (subsession->rtcpInstance() != NULL) {
	  subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
	}

	someSubsessionsWereActive = True;
      }
    }

    if (someSubsessionsWereActive) {
      // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
      // Don't bother handling the response to the "TEARDOWN".
      rtspClient->sendTeardownCommand(*scs.session, NULL);
    }
  }

  env << *rtspClient << "Closing the stream.\n";
  Medium::close(rtspClient);
    // Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.

  if (--rtspClientCount == 0) {
    // The final stream has ended, so exit the application now.
    // (Of course, if you're embedding this code into your own application, you might want to comment this out,
    // and replace it with "eventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop, and continue running "main()".)
    exit(exitCode);
  }
}

ourRTSPClient::ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
			     int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
  : RTSPClient(env,rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1) {
        //UQ 11/13/2013  Instantiate MPEG2 TS muxer.
        outputTSStream = MPEG2TransportStreamFromESSource::createNew(env);
}

ourRTSPClient::~ourRTSPClient() {
    //UQ 11/13/2013  Free MPEG2 TS muxer instance.
    Medium::close(outputTSStream);
}

----- FULL PATCH -----
--- testRTSPClient.cpp.orig	2013-11-12 15:31:25.318574152 -0800
+++ testRTSPClient.cpp	2013-11-13 14:22:48.505537734 -0800
@@ -128,8 +128,15 @@
 
 public:
   StreamClientState scs;
+    //UQ 11/13/2013  Declare H.264-ES to MPEG2-TS converter.
+    MPEG2TransportStreamFromESSource *outputTSStream;
+    //UQ 11/13/2013  Declare H.264-ES Framer.
+    H264VideoStreamDiscreteFramer* H264framer;
 };
 
+
+//UQ 11/12/2013  Remove DummySink and replace with FileSink.
+#if 0
 // Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream').
 // In practice, this might be a class (or a chain of classes) that decodes and then renders the incoming audio or video.
 // Or it might be a "FileSink", for outputting the received data into a file (as is done by the "openRTSP" application).
@@ -162,6 +169,8 @@
   MediaSubsession& fSubsession;
   char* fStreamId;
 };
+#endif
+
 
 #define RTSP_CLIENT_VERBOSITY_LEVEL 1 // by default, print verbose output from each "RTSPClient"
 
@@ -274,7 +283,12 @@
     // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
     // after we've sent a RTSP "PLAY" command.)
 
+//UQ 11/12/2013  Replace with FileSink.
+#if 0        
     scs.subsession->sink = DummySink::createNew(env, *scs.subsession, rtspClient->url());
+#endif    
+    scs.subsession->sink = FileSink::createNew(env, "/tmp/live555.ts", 2000000);
+
       // perhaps use your own custom "MediaSink" subclass instead
     if (scs.subsession->sink == NULL) {
       env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
@@ -284,8 +298,21 @@
 
     env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n";
     scs.subsession->miscPtr = rtspClient; // a hack to let subsession handle functions get the "RTSPClient" from the subsession 
+    env << "Video source is " << scs.subsession->mediumName() << "\n";
+    
+    ((ourRTSPClient*)rtspClient)->H264framer = H264VideoStreamDiscreteFramer::createNew(env, scs.subsession->readSource());
+    
+    //UQ 11/13/2013  Add H.264 Video source to MPEG2-TS converter.
+    ((ourRTSPClient*)rtspClient)->outputTSStream->addNewVideoSource(((ourRTSPClient*)rtspClient)->H264framer, 5);
+    
+//UQ 11/13/2013  We don't want to save ES data so comment this out.
+#if 0
     scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
 				       subsessionAfterPlaying, scs.subsession);
+#endif
+    //UQ 11/13/2013  Save the muxed TS file.
+    scs.subsession->sink->startPlaying(*((ourRTSPClient*)rtspClient)->outputTSStream,
+                                       subsessionAfterPlaying, scs.subsession);
     // Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
     if (scs.subsession->rtcpInstance() != NULL) {
       scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession);
@@ -347,6 +374,9 @@
   Medium::close(subsession->sink);
   subsession->sink = NULL;
 
+  //UQ 11/13/2013  Release H264framer resources.
+  Medium::close(((ourRTSPClient*)rtspClient)->H264framer);
+
   // Next, check whether *all* subsessions' streams have now been closed:
   MediaSession& session = subsession->parentSession();
   MediaSubsessionIterator iter(session);
@@ -393,6 +423,8 @@
       if (subsession->sink != NULL) {
 	Medium::close(subsession->sink);
 	subsession->sink = NULL;
+        //UQ 11/13/2013  Release H264framer resources.
+        Medium::close(((ourRTSPClient*)rtspClient)->H264framer);
 
 	if (subsession->rtcpInstance() != NULL) {
 	  subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
@@ -432,9 +464,13 @@
 ourRTSPClient::ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
 			     int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
   : RTSPClient(env,rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1) {
+        //UQ 11/13/2013  Instantiate MPEG2 TS muxer.
+        outputTSStream = MPEG2TransportStreamFromESSource::createNew(env);
 }
 
 ourRTSPClient::~ourRTSPClient() {
+    //UQ 11/13/2013  Free MPEG2 TS muxer instance.
+    Medium::close(outputTSStream);
 }
 
 
@@ -456,6 +492,8 @@
 }
 
 
+//UQ 11/12/2013  Remove DummySink and replace with FileSink.
+#if 0
 // Implementation of "DummySink":
 
 // Even though we're not going to be doing anything with the incoming data, we still need to receive it.
@@ -519,3 +557,5 @@
                         onSourceClosure, this);
   return True;
 }
+#endif
+


