[Live-devel] live audio source with onDemandServer
Neerav Patel
neeravpatel at hotmail.com
Wed Aug 20 06:30:40 PDT 2014
Hi, I am trying to set up live555 to stream RTSP audio from a microphone using an on-demand server, but I am not sure how to do it correctly. I have attempted it by overriding OnDemandServerMediaSubsession and FramedSource, but I am running into an issue where I hear a bit of sound for about half a second and then silence; the Messages window in VLC says "buffer arrived way too early". I am encoding the audio as MP2 with ffmpeg. I have attached what I am doing here:
#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif
#include "ImageTransfer.h"
class MP2DeviceSource : public FramedSource {
public:
static MP2DeviceSource* createNew(UsageEnvironment& env, unsigned int stream_id,
AudioTransfer * audioTransfer);
public:
EventTriggerId eventTriggerId;
protected:
MP2DeviceSource(UsageEnvironment& env,
AudioTransfer * audioTransfer);
virtual ~MP2DeviceSource();
private:
virtual void doGetNextFrame();
private:
static void deliverFrame0(void* clientData);
void deliverFrame();
private:
AudioTransfer * audioTx;
};
#include "MP2DeviceSource.h"
MP2DeviceSource*
MP2DeviceSource::createNew(UsageEnvironment& env, unsigned int stream_id,
AudioTransfer * audioTransfer)
{
return new MP2DeviceSource(env, audioTransfer);
}
MP2DeviceSource::MP2DeviceSource(UsageEnvironment& env, AudioTransfer * audioTransfer)
: FramedSource(env), eventTriggerId(0), audioTx(audioTransfer)
{
if (eventTriggerId == 0)
eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
}
MP2DeviceSource::~MP2DeviceSource()
{
envir().taskScheduler().deleteEventTrigger(eventTriggerId);
eventTriggerId = 0;
}
void MP2DeviceSource::doGetNextFrame()
{
deliverFrame();
}
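For reference, my understanding of the doGetNextFrame()/event-trigger pattern from live555's DeviceSource example is roughly the following; hasFrame() and signalNewFrame() are hypothetical hooks that would be wired up to the ffmpeg capture thread:

void MP2DeviceSource::doGetNextFrame()
{
  // Deliver immediately only if an encoded frame is already queued;
  // otherwise return and wait for the capture thread to fire the trigger.
  if (audioTx->hasFrame()) {   // hypothetical availability check
    deliverFrame();
  }
}

// Hypothetical hook, called from the capture/encoding thread whenever a
// new MP2 frame becomes available:
void MP2DeviceSource::signalNewFrame()
{
  envir().taskScheduler().triggerEvent(eventTriggerId, this);
}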
void MP2DeviceSource::deliverFrame0(void *clientData)
{
((MP2DeviceSource*)clientData)->deliverFrame();
}
// Minimal Windows replacement for gettimeofday(): converts the current system
// time (FILETIME: 100-ns ticks since 1601-01-01) to the Unix epoch.
static const unsigned __int64 epoch = 116444736000000000ULL;
int gettimeofday(struct timeval * tp, struct timezone * tzp)
{
FILETIME file_time;
SYSTEMTIME system_time;
ULARGE_INTEGER ularge;
GetSystemTime(&system_time);
SystemTimeToFileTime(&system_time, &file_time);
ularge.LowPart = file_time.dwLowDateTime;
ularge.HighPart = file_time.dwHighDateTime;
tp->tv_sec = (long) ((ularge.QuadPart - epoch) / 10000000L);
tp->tv_usec = (long) (system_time.wMilliseconds * 1000);
return 0;
}
void MP2DeviceSource::deliverFrame()
{
gettimeofday(&fPresentationTime, NULL);
audioTx->GetMP2Image( &fTo, &fFrameSize );
fDurationInMicroseconds = 26000;
FramedSource::afterGetting(this);
}
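I suspect deliverFrame() should copy into the buffer that the downstream framer supplies (fTo) and respect fMaxSize, rather than passing &fTo into GetMP2Image(), along the lines of the DeviceSource example. A sketch of what I mean, with frameData()/frameSize() as hypothetical accessors on AudioTransfer (memmove needs <string.h>):

void MP2DeviceSource::deliverFrame()
{
  if (!isCurrentlyAwaitingData()) return; // the sink is not ready for data yet

  // Hypothetical accessors for the newest encoded MP2 frame:
  u_int8_t* newFrame     = audioTx->frameData();
  unsigned  newFrameSize = audioTx->frameSize();

  if (newFrameSize > fMaxSize) {          // never overrun the supplied buffer
    fFrameSize = fMaxSize;
    fNumTruncatedBytes = newFrameSize - fMaxSize;
  } else {
    fFrameSize = newFrameSize;
    fNumTruncatedBytes = 0;
  }
  memmove(fTo, newFrame, fFrameSize);     // copy into fTo, do not replace it

  gettimeofday(&fPresentationTime, NULL); // capture time of this frame
  // One MPEG-1 layer II frame is 1152 samples: ~26122 us at 44.1 kHz
  fDurationInMicroseconds = 1152 * 1000000 / 44100;

  FramedSource::afterGetting(this);       // hand the frame to the framer
}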
#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH
#include "OnDemandServerMediaSubsession.hh"
#endif
class MP2AudioMediaSubsession: public OnDemandServerMediaSubsession {
public:
static MP2AudioMediaSubsession* createNew(UsageEnvironment& env,
Boolean reuseFirstSource,
AudioTransfer * audioTransfer);
protected:
MP2AudioMediaSubsession(UsageEnvironment& env,
Boolean reuseFirstSource,
AudioTransfer * audioTransfer);
virtual ~MP2AudioMediaSubsession();
protected:
virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
unsigned& estBitrate);
virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupSock,
unsigned char rtpPayloadTypeIfDynamic,
FramedSource* inputSource);
protected:
unsigned int id;
AudioTransfer * audioTx;
};
#include "MP2MediaSubsession.h"
#include "MP2DeviceSource.h"
#include "MPEG1or2AudioRTPSink.hh"
#include "MPEG1or2AudioStreamFramer.hh"
MP2AudioMediaSubsession*
MP2AudioMediaSubsession::createNew(UsageEnvironment& env,
Boolean reuseFirstSource,
AudioTransfer * audioTransfer)
{
return new MP2AudioMediaSubsession(env, reuseFirstSource, audioTransfer );
}
MP2AudioMediaSubsession::MP2AudioMediaSubsession(UsageEnvironment& env,
Boolean reuseFirstSource,
AudioTransfer * audioTransfer)
: OnDemandServerMediaSubsession(env, reuseFirstSource), id(0), audioTx(audioTransfer)
{
}
FramedSource* MP2AudioMediaSubsession::createNewStreamSource(unsigned clientSessionId,
unsigned &estBitrate)
{
estBitrate = 128; // in kbps (this is a bitrate estimate, not the sample rate)
MP2DeviceSource *source = MP2DeviceSource::createNew(envir(), id, audioTx);
return MPEG1or2AudioStreamFramer::createNew(envir(), source );
}
RTPSink* MP2AudioMediaSubsession::createNewRTPSink(Groupsock* rtpGroupSock,
unsigned char rtpPayloadTypeIfDynamic,
FramedSource* inputSource)
{
return MPEG1or2AudioRTPSink::createNew( envir(), rtpGroupSock );
}
MP2AudioMediaSubsession::~MP2AudioMediaSubsession()
{
}
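For context, a minimal sketch of the RTSPServer wiring this subsession plugs into (the stream name, port, and the way AudioTransfer is constructed are placeholders):

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "MP2MediaSubsession.h"

int main(int argc, char** argv)
{
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, NULL);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    return 1;
  }

  // Assuming a default-constructible capture/encoder wrapper:
  AudioTransfer* audioTransfer = new AudioTransfer();

  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "micAudio",
      "micAudio", "MP2 audio from a microphone");
  sms->addSubsession(MP2AudioMediaSubsession::createNew(*env,
      True /*reuseFirstSource*/, audioTransfer));
  rtspServer->addServerMediaSession(sms);

  env->taskScheduler().doEventLoop(); // does not return
  return 0;
}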