Hi,

I wrote my own subclasses of FramedSource and OnDemandServerMediaSubsession to stream H.264 video encoded by libav (ffmpeg). I use these classes the same way testOnDemandRTSPServer.cpp does (as you can see in my main). When I try to connect to the RTSP server with VLC, my FramedSource gets created and then destroyed right away (deliverFrame0() and doGetNextFrame() are never called).

I don't know what I'm doing wrong, so here is my code:

imLiveStreamSource.cpp // derived from FramedSource
###################################################
#include "imLiveStreamSource.h"
#include <GroupsockHelper.hh> // for gettimeofday()

EventTriggerId imLiveStreamSource::eventTriggerId = 0;
unsigned imLiveStreamSource::mReferenceCount = 0;

imLiveStreamSource* imLiveStreamSource::createNew(UsageEnvironment& env, imLiveStreamParameters params)
{
    return new imLiveStreamSource(env, params);
}

imLiveStreamSource::imLiveStreamSource(UsageEnvironment& env, imLiveStreamParameters param)
    : FramedSource(env),
      mReady(true),
      mParameters(param),
      mEncodedVideoFrame(NULL),
      mEncodedVideoFrameSize(0),
//    mIOService(new boost::asio::io_service()),
//    mWork(new boost::asio::io_service::work(*mIOService)),
//    mTimer(*mIOService),
      mEncodingEnabled(true),
      mNextEncodedVideoFrameWanted(false)
{
    if(mReferenceCount == 0)
    {
        av_register_all();
        mOutputFormat = av_guess_format(NULL, "test.h264", NULL);

        if(!mOutputFormat)
        {
            std::cout << "Cannot guess output format! Using mpeg!" << std::endl;
            mOutputFormat = av_guess_format("mpeg", NULL, NULL);
        }
        if(!mOutputFormat)
        {
            std::cout << "Could not find suitable output format." << std::endl;
            mReady = false;
        }

        mContext = avformat_alloc_context();
        if(!mContext)
        {
            std::cout << "Cannot allocate avformat memory." << std::endl;
            mReady = false;
        }
        mContext->oformat = mOutputFormat;

        mVideoStream = NULL;
        mOutputFormat->audio_codec = CODEC_ID_NONE;
        mVideoStream = addVideoStream(mContext, mOutputFormat->video_codec);

        if(mVideoStream)
            openVideo(mContext, mVideoStream);

        for (int x = 0; x < NUMBER_OF_THREADS; x++)
        {
            //mWorkerThreads.create_thread(boost::bind(&imLiveStreamSource::workerThread, this));
        }

        //mTimer.expires_from_now(boost::posix_time::seconds((int)(1/mParameters.mFrameRate)));
        //mTimer.async_wait(boost::bind(&imLiveStreamSource::encodingThread, this, _1));
    }
    ++mReferenceCount;

    // TODO: local init stuff

    if(eventTriggerId == 0)
    {
        eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
    }
}

imLiveStreamSource::~imLiveStreamSource()
{
    // Any instance-specific 'destruction' (i.e., resetting) of the device would be done here:
    //%%% TO BE WRITTEN %%%

    --mReferenceCount;
    if(mReferenceCount == 0)
    {
        //! Free video encoding stuff
        if(mVideoStream)
            closeVideo(mContext, mVideoStream);
        for(unsigned i = 0; i < mContext->nb_streams; i++)
        {
            av_freep(&mContext->streams[i]->codec);
            av_freep(&mContext->streams[i]);
        }
        av_free(mContext);

        //! Video streaming stuff
        envir().taskScheduler().deleteEventTrigger(eventTriggerId);
        eventTriggerId = 0;
    }
}

void imLiveStreamSource::doGetNextFrame()
{
    // This function is called (by our 'downstream' object) when it asks for new data.

    // Note: If, for some reason, the source device stops being readable
    // (e.g., it gets closed), then you do the following:
    if(!mReady)
    {
        handleClosure(this);
        return;
    }

    // If a new frame of data is immediately available to be delivered, then do this now:
    if (mNextEncodedVideoFrame) {
        write_video_frame(mContext, mVideoStream);
        deliverFrame();
    }
    else
        mNextEncodedVideoFrameWanted = true;

    // No new data is immediately available to be delivered. We don't do anything more here.
    // Instead, our event trigger must be called (e.g., from a separate thread)
    // when new data becomes available.
}

void imLiveStreamSource::deliverFrame0(void* clientData)
{
    ((imLiveStreamSource*)clientData)->deliverFrame();
}

void imLiveStreamSource::deliverFrame()
{
    if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

    u_int8_t* newFrameDataStart = mEncodedVideoFrame;
    unsigned newFrameSize = mEncodedVideoFrameSize;

    // Deliver the data here:
    if (newFrameSize > fMaxSize) {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = newFrameSize - fMaxSize;
    } else {
        fFrameSize = newFrameSize;
    }
    gettimeofday(&fPresentationTime, NULL); // If you have a more accurate time
        // - e.g., from an encoder - then use that instead.
    // If the device is *not* a 'live source' (e.g., it comes instead from a file
    // or buffer), then set "fDurationInMicroseconds" here.
    memmove(fTo, newFrameDataStart, fFrameSize);
    mNextEncodedVideoFrame = false;

    // After delivering the data, inform the reader that it is now available:
    FramedSource::afterGetting(this);
}
######################################################

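(For clarity: as the comments in doGetNextFrame() say, the idea is that the encoding thread fires the event trigger whenever libav hands me a packet. Roughly like this - onEncodedFrame() is only a sketch of the hand-off, following the pattern in live555's DeviceSource.cpp, not the exact code I run:)

// Sketch: called on the encoding thread when a new encoded frame is ready.
// (onEncodedFrame() is illustrative; the real hand-off would live in encodingThread().)
void imLiveStreamSource::onEncodedFrame(u_int8_t* data, unsigned size)
{
    mEncodedVideoFrame = data;
    mEncodedVideoFrameSize = size;
    mNextEncodedVideoFrame = true;

    // Wake the live555 event loop; it then calls deliverFrame0() -> deliverFrame()
    // on the event-loop thread (the same pattern as in DeviceSource.cpp):
    envir().taskScheduler().triggerEvent(eventTriggerId, this);
}
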
imLiveStreamMediaSubsession.cpp // derived from OnDemandServerMediaSubsession

######################################################

imLiveStreamMediaSubsession::imLiveStreamMediaSubsession(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource)
  : OnDemandServerMediaSubsession(env, reuseFirstSource)
{
}

imLiveStreamMediaSubsession::~imLiveStreamMediaSubsession()
{
}

imLiveStreamMediaSubsession* imLiveStreamMediaSubsession::createNew(UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource)
{
  return new imLiveStreamMediaSubsession(env, fileName, reuseFirstSource);
}

FramedSource* imLiveStreamMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
  estBitrate = 400; // kbps, estimate ??

  imLiveStreamParameters param;
  param.mBitRate = 400000;
  param.mCodec = "x264";
  param.mFrameRate = 24;
  param.mHeight = 480;
  param.mWidth = 800;
  // Create a framer for the Video Elementary Stream:
  return imLiveStreamSource::createNew(envir(), param);
}

RTPSink* imLiveStreamMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
                                                       unsigned char rtpPayloadTypeIfDynamic,
                                                       FramedSource* /*inputSource*/)
{
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}

######################################################
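
(One thing I am not sure about: H264VideoFileServerMediaSubsession, which testOnDemandRTSPServer.cpp uses for H.264, does not return its input source directly from createNewStreamSource() - it wraps it in an H264VideoStreamFramer first. Maybe my raw source needs a framer too? A sketch of what I mean, assuming my encoder delivers one NAL unit per frame:)

FramedSource* imLiveStreamMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
    // ... parameter setup as above ...
    imLiveStreamSource* source = imLiveStreamSource::createNew(envir(), param);
    // Wrap the raw source in a discrete framer, since it delivers single NAL units
    // (H264VideoStreamDiscreteFramer expects NAL units without start codes):
    return H264VideoStreamDiscreteFramer::createNew(envir(), source);
}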

main.cpp

######################################################

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  char const* descriptionString
    = "Session streamed by \"INGAme\"";

  // A H.264 video elementary stream:
  {
    char const* streamName = "h264ESVideoTest";
    char const* inputFileName = "test.264";

    ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
    sms->addSubsession(imLiveStreamMediaSubsession::createNew(*env, inputFileName, reuseFirstSource));
    rtspServer->addServerMediaSession(sms);

    announceStream(rtspServer, sms, streamName, inputFileName);
  }

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative
  // HTTP port numbers (8000 and 8080).
  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
  } else {
    *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName) {
  char* url = rtspServer->rtspURL(sms);
  UsageEnvironment& env = rtspServer->envir();
  env << "\n\"" << streamName << "\" stream, from the file \"" << inputFileName << "\"\n";
  env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
}

#########################################
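
(For completeness: main.cpp also relies on the same includes and file-scope globals as testOnDemandRTSPServer.cpp, plus a forward declaration of announceStream() since it is defined below main():)

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

UsageEnvironment* env;

// To make the second and subsequent clients for each stream reuse the same
// input stream as the first client, change the following "False" to "True":
Boolean reuseFirstSource = False;

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName);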

Thank you for taking the time to read this!

Christian