[Live-devel] about mjpeg ondemandrtspserver
Zhixue Zhang
zhangzx at rcs-9000.com
Tue Dec 6 20:21:38 PST 2005
Hi, Ross Finlayson
I am trying to set up an MJPEG on-demand RTSP server. The following is my subsession's code, but when I tried openRTSP, I failed to get an SDP description. Can you help me?
GXCJMPEGServerMediaSubsession*
GXCJMPEGServerMediaSubsession::createNew( UsageEnvironment& env,
                                          Boolean _reuseFirstSource,
                                          GXCBasicSource* _source )
{
  // Factory function: heap-allocates a new subsession wrapping "_source".
  return new GXCJMPEGServerMediaSubsession( env, _reuseFirstSource, _source );
}
GXCJMPEGServerMediaSubsession
::GXCJMPEGServerMediaSubsession( UsageEnvironment& env,
                                 Boolean _reuseFirstSource,
                                 GXCBasicSource* _source )
  : OnDemandServerMediaSubsession( env, _reuseFirstSource ),
    fDoneFlag( 0 ),            // cleared until the aux-SDP-line check completes
    fInputSource( _source )    // raw input that createNewStreamSource() wraps
{
  // All state is set up in the initializer list; nothing further to do here.
}
GXCJMPEGServerMediaSubsession::~GXCJMPEGServerMediaSubsession()
{
  // Nothing to release here; fInputSource's ownership is not taken by this
  // class (NOTE(review): confirm who deletes the GXCBasicSource).
}
static void afterPlayingDummy( void* cliendata)
{
GXCJMPEGServerMediaSubsession* subsess
= ( GXCJMPEGServerMediaSubsession* )cliendata;
// Signal the event loop that we're done:
subsess->setDoneFlag();
}
static void checkForAuxSDPLine( void* clienData )
{
GXCJMPEGServerMediaSubsession* subsess
= (GXCJMPEGServerMediaSubsession*) clienData;
subsess->checkForAuxSDPLine1();
}
void GXCJMPEGServerMediaSubsession::setDoneFlag()
{
  // Any non-zero value makes doEventLoop(&fDoneFlag) return; set all bits.
  fDoneFlag = ~0;
}
void GXCJMPEGServerMediaSubsession::checkForAuxSDPLine1()
{
  // Poll the sink: if its aux SDP line is not yet available, re-arm ourselves
  // and check again shortly.
  if ( fDummyRTPSink->auxSDPLine() == NULL )
  {
    int const uSecsToDelay = 100000; // re-poll in 100 ms
    nextTask() = envir().taskScheduler().scheduleDelayedTask( uSecsToDelay,
                   (TaskFunc*)checkForAuxSDPLine, this );
    return;
  }

  // The aux SDP line is ready; signal the event loop that we're done:
  setDoneFlag();
}
char const* GXCJMPEGServerMediaSubsession
::getAuxSDPLine( RTPSink* rtpSink, FramedSource* inputSource )
{
// Note: For MPEG-4 video files, the 'config' information isn't known
// until we start reading the file. This means that "rtpSink"s
// "auxSDPLine()" will be NULL initially, and we need to start reading
// data from our file until this changes.
fDummyRTPSink = rtpSink;
// Start reading the file:
fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
// Check whether the sink's 'auxSDPLine()' is ready:
checkForAuxSDPLine(this);
envir().taskScheduler().doEventLoop(&fDoneFlag);
char const* auxSDPLine = fDummyRTPSink->auxSDPLine();
return auxSDPLine;
}
FramedSource* GXCJMPEGServerMediaSubsession
::createNewStreamSource( unsigned /*clientSessionId*/, unsigned& estBitrate )
{
  // Rough bitrate estimate, in kbps (used by the base class for RTCP
  // bandwidth allocation).
  estBitrate = 500;

  // Wrap the raw input in a JPEG framer, which delivers one JPEG frame at a
  // time to the sink:
  return GXCJPEGFramerSource::createNew( envir(), fInputSource );
}
RTPSink* GXCJMPEGServerMediaSubsession
::createNewRTPSink( Groupsock* rtpGroupsock,
                    unsigned char /*rtpPayloadTypeIfDynamic*/,
                    FramedSource* /*inputSource*/ )
{
  // JPEG uses a fixed RTP payload format, so the dynamic payload type and the
  // input source are not needed to construct the sink.
  return JPEGVideoRTPSink::createNew( envir(), rtpGroupsock );
}
And GXCJPEGFramerSource is defined as follows:
// Frames JPEG images from a GXCBasicSource for streaming via
// JPEGVideoRTPSink.  type()/qFactor()/width()/height() supply the per-frame
// parameters that the JPEG/RTP packetizer puts in each packet header.
class GXCJPEGFramerSource: public JPEGVideoSource{
public:
// Factory function; ownership of the returned object follows the usual
// live555 Medium conventions.
static GXCJPEGFramerSource* createNew(UsageEnvironment& env,
GXCBasicSource *_source,
unsigned preferredFrameSize = 0,
unsigned playTimePerFrame = 0);
// "preferredFrameSize" == 0 means 'no preference'
// "playTimePerFrame" is in microseconds
unsigned fileSize() const { return fFileSize; }
// 0 means zero-length, unbounded, or unknown
// NOTE(review): the four accessors below return hard-coded values; they must
// match the actual encoded JPEG stream or receivers will misdecode.
u_int8_t type() { return 1;}
// NOTE(review): qFactor 1 is the lowest JPEG quality setting — confirm this
// matches the encoder's real Q factor.
u_int8_t qFactor() {return 1;}
u_int8_t width() {return 44;} // # pixels/8 (or 0 for 2048 pixels) => 352 px
u_int8_t height() {return 36;} // # pixels/8 (or 0 for 2048 pixels) => 288 px
protected:
// Constructor is protected: called only by createNew()
GXCJPEGFramerSource(UsageEnvironment& env,
GXCBasicSource *_source,
unsigned preferredFrameSize,
unsigned playTimePerFrame);
// called only by createNew()
virtual ~GXCJPEGFramerSource();
private:
// redefined virtual functions:
virtual void doGetNextFrame();
private:
unsigned fPreferredFrameSize;  // 0 = no preference (see createNew)
unsigned fPlayTimePerFrame;    // microseconds per frame (see createNew)
unsigned fFileSize;            // reported by fileSize(); 0 = unknown
FILE *fFid;                    // NOTE(review): file handle — unused in the
                               // code shown here; verify it is needed
};
Thanks a lot.

Zhixue Zhang
zhangzx at rcs-9000.com
2005-12-06
More information about the live-devel
mailing list