[Live-devel] Fails to reschedule getNextFrame(...,pt)

Dan Weber dweber at robotics.net
Fri May 11 15:44:40 PDT 2007


Hi guys,

Attached is a patch that adds multiple payload type support 
at the RTP Source level.  I'm experiencing an issue: after I receive, say, DTMF pt 101
and it properly calls my DTMFSource class's afterGettingFunc, that RTP source
seems to either A) stop listening for network input (although no bounces are shown in Ethereal)
or B) fails to reschedule.  My DTMFSource class (also attached) properly calls 
FramedSource::afterGetting(this) as well.  There are two separate SimpleRTPSinks 
on the same groupsock (one for audio and one for DTMF).  So I'm confused.

Does anybody know of an obvious answer to what could be the cause of the problem?

Thanks,
Dan  
-------------- next part --------------
Index: live/liveMedia/include/MultiFramedRTPSource.hh
===================================================================
--- live.orig/liveMedia/include/MultiFramedRTPSource.hh	2007-05-08 16:10:07.000000000 -0400
+++ live/liveMedia/include/MultiFramedRTPSource.hh	2007-05-08 18:43:01.000000000 -0400
@@ -26,10 +26,64 @@
 #include "RTPSource.hh"
 #endif
 
+#include <map>
+#include "FramedSource.hh"
+
+struct MultiFramedRTPSourceInfo {
+	typedef void (afterGettingFunc)(void* clientData, unsigned frameSize,
+                                  unsigned numTruncatedBytes,
+                                  struct timeval presentationTime,
+                                  unsigned durationInMicroseconds);
+
+	unsigned char rtpPayloadType;
+	unsigned char* fTo;
+	unsigned fMaxSize;
+	afterGettingFunc* fAfterGettingFunc;
+	void* fAfterGettingFuncClientData;
+
+	MultiFramedRTPSourceInfo() {}
+
+	MultiFramedRTPSourceInfo(unsigned char pt,
+				 unsigned char* to,
+				 unsigned maxSize,
+				 afterGettingFunc* afterGettingFunc,
+				 void* afterGettingFuncClientData) :
+		rtpPayloadType(pt),
+		fTo(to),
+		fMaxSize(maxSize),
+		fAfterGettingFunc(afterGettingFunc),
+		fAfterGettingFuncClientData(afterGettingFuncClientData) {}
+};
+
+
 class BufferedPacket; // forward
 class BufferedPacketFactory; // forward
 
 class MultiFramedRTPSource: public RTPSource {
+public:
+  void getNextFrame(unsigned char* to, unsigned maxSize,
+  		    afterGettingFunc* afterGettingFunc,
+		    void* afterGettingClientData,
+		    onCloseFunc* onCloseFunc,
+		    void* onCloseClientData) {
+	FramedSource::getNextFrame(to,maxSize,afterGettingFunc,afterGettingClientData,
+				   onCloseFunc, onCloseClientData);
+  }
+  void getNextFrame(unsigned char* to, unsigned maxSize,
+                    afterGettingFunc* afterGettingFunc,
+                    void* afterGettingClientData,
+                    onCloseFunc* onCloseFunc,
+                    void* onCloseClientData, unsigned char rtpPayloadType) {
+
+	mPayloadHash[rtpPayloadType] = MultiFramedRTPSourceInfo(rtpPayloadType,
+								to,
+								maxSize,
+								afterGettingFunc,
+								afterGettingClientData);
+	// Can't handle onClose for now
+  }
+
+
 protected:
   MultiFramedRTPSource(UsageEnvironment& env, Groupsock* RTPgs,
 		       unsigned char rtpPayloadFormat,
@@ -50,6 +104,9 @@
 protected:
   Boolean fCurrentPacketBeginsFrame;
   Boolean fCurrentPacketCompletesFrame;
+  std::map<unsigned char,MultiFramedRTPSourceInfo> mPayloadHash;
+  unsigned char fAfterGettingPt;
+
 
 protected:
   // redefined virtual functions:
@@ -64,6 +121,19 @@
   void reset();
   void doGetNextFrame1();
 
+  static void alternatePtAfterGetting(FramedSource* framedSource) {
+  	MultiFramedRTPSource* fs = dynamic_cast<MultiFramedRTPSource*>(framedSource);
+	if (!fs)
+		throw;
+  	MultiFramedRTPSourceInfo& info = fs->mPayloadHash[fs->fAfterGettingPt];
+	if (info.fAfterGettingFunc)
+		(*info.fAfterGettingFunc)(info.fAfterGettingFuncClientData,fs->fFrameSize,
+			       fs->fNumTruncatedBytes,fs->fPresentationTime,
+			       fs->fDurationInMicroseconds);
+	fs->mPayloadHash.erase(fs->fAfterGettingPt);
+	fs->fAfterGettingPt = 0;
+  }
+
   static void networkReadHandler(MultiFramedRTPSource* source, int /*mask*/);
   friend void networkReadHandler(MultiFramedRTPSource*, int);
 
@@ -94,7 +164,7 @@
   void assignMiscParams(unsigned short rtpSeqNo, unsigned rtpTimestamp,
 			struct timeval presentationTime,
 			Boolean hasBeenSyncedUsingRTCP, 
-			Boolean rtpMarkerBit, struct timeval timeReceived);
+			Boolean rtpMarkerBit, struct timeval timeReceived, unsigned char rtpPayloadType);
   void skip(unsigned numBytes); // used to skip over an initial header
   void removePadding(unsigned numBytes); // used to remove trailing bytes
   void appendData(unsigned char* newData, unsigned numBytes);
@@ -112,6 +182,7 @@
   unsigned char* data() const { return &fBuf[fHead]; }
   unsigned dataSize() const { return fTail-fHead; }
   Boolean rtpMarkerBit() const { return fRTPMarkerBit; }
+  unsigned char rtpPayloadType() const { return fRTPPayloadType; }
 
 protected:
   virtual void reset();
@@ -138,6 +209,7 @@
   Boolean fHasBeenSyncedUsingRTCP;
   Boolean fRTPMarkerBit;
   struct timeval fTimeReceived;
+  unsigned char fRTPPayloadType;
 };
 
 // A 'factory' class for creating "BufferedPacket" objects.
Index: live/liveMedia/MultiFramedRTPSource.cpp
===================================================================
--- live.orig/liveMedia/MultiFramedRTPSource.cpp	2007-05-08 16:41:44.000000000 -0400
+++ live/liveMedia/MultiFramedRTPSource.cpp	2007-05-10 18:41:40.000000000 -0400
@@ -169,10 +169,20 @@
 
     // The packet is usable. Deliver all or part of it to our caller:
     unsigned frameSize;
-    nextPacket->use(fTo, fMaxSize, frameSize, fNumTruncatedBytes,
+    if (nextPacket->rtpPayloadType() == rtpPayloadFormat()) {
+    	nextPacket->use(fTo, fMaxSize, frameSize, fNumTruncatedBytes,
 		    fCurPacketRTPSeqNum, fCurPacketRTPTimestamp,
 		    fPresentationTime, fCurPacketHasBeenSynchronizedUsingRTCP,
 		    fCurPacketMarkerBit);
+    } else {
+    	// This implies we have an alternate handler for this payload type
+	MultiFramedRTPSourceInfo& info = mPayloadHash[nextPacket->rtpPayloadType()];
+	fAfterGettingPt = nextPacket->rtpPayloadType();
+	nextPacket->use(info.fTo,info.fMaxSize,frameSize,fNumTruncatedBytes,
+			fCurPacketRTPSeqNum, fCurPacketRTPTimestamp,
+			fPresentationTime, fCurPacketHasBeenSynchronizedUsingRTCP,
+			fCurPacketMarkerBit);
+    }
     fFrameSize += frameSize;
     
     if (!nextPacket->hasUsableData()) {
@@ -192,16 +202,33 @@
 	// Common case optimization: There are no more queued incoming packets, so this code will not get
 	// executed again without having first returned to the event loop.  Call our 'after getting' function
 	// directly, because there's no risk of a long chain of recursion (and thus stack overflow):
-	afterGetting(this);
+
+	if (nextPacket->rtpPayloadType() == rtpPayloadFormat()) {
+		afterGetting(this);
+	} else {
+		MultiFramedRTPSourceInfo& info = mPayloadHash[nextPacket->rtpPayloadType()];
+		(*info.fAfterGettingFunc)(info.fAfterGettingFuncClientData,fFrameSize,
+				       fNumTruncatedBytes,fPresentationTime,
+				       fDurationInMicroseconds);
+		mPayloadHash.erase(nextPacket->rtpPayloadType());
+	}
       } else {
 	// Special case: Call our 'after getting' function via the event loop.
+
 	nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
-								 (TaskFunc*)FramedSource::afterGetting, this);
+								 nextPacket->rtpPayloadType() == rtpPayloadFormat() ?
+								 (TaskFunc*)FramedSource::afterGetting : (TaskFunc*)alternatePtAfterGetting, this);
+
       }
     } else {
       // This packet contained fragmented data, and does not complete
       // the data that the client wants.  Keep getting data:
-      fTo += frameSize; fMaxSize -= frameSize;
+      if (nextPacket->rtpPayloadType() == rtpPayloadFormat()) {
+      	fTo += frameSize; fMaxSize -= frameSize;
+      } else {
+      	MultiFramedRTPSourceInfo& info = mPayloadHash[nextPacket->rtpPayloadType()];
+	info.fTo += frameSize; info.fMaxSize -= frameSize;
+      }
       fNeedDelivery = True;
     }
   }
@@ -265,7 +292,8 @@
     // Check the Payload Type.
     if ((unsigned char)((rtpHdr&0x007F0000)>>16)
 	!= source->rtpPayloadFormat()) {
-      break;
+      	if (source->mPayloadHash.find((unsigned char)((rtpHdr&0x007F0000)>>16)) == source->mPayloadHash.end())
+      		break;
     }
     
     // The rest of the packet is the usable data.  Record and save it:
@@ -287,7 +315,7 @@
     gettimeofday(&timeNow, NULL);
     bPacket->assignMiscParams(rtpSeqNo, rtpTimestamp, presentationTime,
 			      hasBeenSyncedUsingRTCP, rtpMarkerBit,
-			      timeNow);
+			      timeNow, (unsigned char)((rtpHdr&0x007F0000)>>16));
     if (!source->fReorderingBuffer->storePacket(bPacket)) break;
 
     readSuccess = True;
@@ -360,13 +388,14 @@
 ::assignMiscParams(unsigned short rtpSeqNo, unsigned rtpTimestamp,
 		   struct timeval presentationTime,
 		   Boolean hasBeenSyncedUsingRTCP, Boolean rtpMarkerBit,
-		   struct timeval timeReceived) {
+		   struct timeval timeReceived, unsigned char rtpPayloadType) {
   fRTPSeqNo = rtpSeqNo;
   fRTPTimestamp = rtpTimestamp;
   fPresentationTime = presentationTime;
   fHasBeenSyncedUsingRTCP = hasBeenSyncedUsingRTCP;
   fRTPMarkerBit = rtpMarkerBit;
   fTimeReceived = timeReceived;
+  fRTPPayloadType = rtpPayloadType;
 }
 
 void BufferedPacket::skip(unsigned numBytes) {
-------------- next part --------------
#include "DTMFSource.h"
#include <algorithm>
namespace msoup {

// Filter that pulls frames of one alternate RTP payload type (DTMF) out of
// a MultiFramedRTPSource.  The 1000-byte staging buffer is where the source
// deposits each raw frame before doGetNextFrame()'s caller receives it.
DTMFSource::DTMFSource(UsageEnvironment& env, MultiFramedRTPSource* source,
		uint8_t payloadType)
	: FramedFilter(env, source),
	  mBuffer(1000),
	  mPayloadType(payloadType) {
}

// Request the next frame of our payload type from the wrapped RTP source.
// Registers afterGettingFrame() as the per-payload-type callback via the
// (patched) getNextFrame(..., rtpPayloadType) overload.
void DTMFSource::doGetNextFrame() {
	// No upstream source: do nothing.  NOTE(review): in this case no frame
	// is ever delivered and no closure is signalled — the downstream reader
	// simply stalls; confirm that is the intended behavior.
	if (!fInputSource) {
		return;
	}

	// fInputSource was supplied to our constructor as a
	// MultiFramedRTPSource*, so this downcast is safe.
	MultiFramedRTPSource* fs = static_cast<MultiFramedRTPSource*>(fInputSource);

	// Use the staging buffer's real size rather than repeating the magic
	// constant 1000 from the constructor, so the two can never drift apart
	// (a mismatch here would overflow mBuffer).
	fs->getNextFrame(&mBuffer[0], static_cast<unsigned>(mBuffer.size()),
			afterGettingFrame, this,
			FramedFilter::handleClosure, this, mPayloadType);
}

void DTMFSource::afterGettingFrame(unsigned frameSize,
		unsigned numTruncatedBytes, struct timeval presentationTime,
		unsigned durationInMicroseconds) {
	
	std::copy(mBuffer.begin(),mBuffer.begin()+frameSize,fTo);
	fPresentationTime = presentationTime;
	fDurationInMicroseconds = durationInMicroseconds;
	fNumTruncatedBytes = numTruncatedBytes;
	fFrameSize = frameSize;
	FramedSource::afterGetting(this);
}
// Nothing to release explicitly: the std::vector staging buffer cleans
// itself up, and the FramedFilter base destructor handles the wrapped
// input source.
DTMFSource::~DTMFSource() {
}

}
-------------- next part --------------
#ifndef DTMFSOURCE_H_
#define DTMFSOURCE_H_

#include "liveMedia.hh"
#include <vector>
namespace msoup
{

// A FramedFilter that extracts frames of a single alternate RTP payload type
// (DTMF; payload type 101 by default, per the constructor's default argument)
// from a MultiFramedRTPSource that is otherwise delivering a different
// payload type on the same groupsock.
class DTMFSource : public FramedFilter
{
	std::vector<uint8_t> mBuffer;   // 1000-byte staging buffer the source writes frames into
	const uint8_t mPayloadType;     // RTP payload type this filter asks the source for
public:
	// fs must be a MultiFramedRTPSource (stored as the FramedFilter input);
	// doGetNextFrame() downcasts it back.
	DTMFSource(UsageEnvironment& ue, MultiFramedRTPSource* fs, uint8_t payloadType = 101);

	
	// Requests the next frame of mPayloadType from the wrapped source.
	virtual void doGetNextFrame();
	virtual ~DTMFSource();
private:
	// Static trampoline handed to MultiFramedRTPSource::getNextFrame();
	// forwards to the member overload below via clientData.
	static void afterGettingFrame(void* clientData, unsigned frameSize,
			unsigned numTruncatedBytes, struct timeval presentationTime,
			unsigned durationInMicroseconds) {
		static_cast<DTMFSource*>(clientData)->afterGettingFrame(frameSize,numTruncatedBytes,presentationTime,durationInMicroseconds);
	}
	
	// Copies the received frame from mBuffer to the downstream reader and
	// completes delivery via FramedSource::afterGetting().
	void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
			struct timeval presentationTime, unsigned durationInMicroseconds);
};

}

#endif /*DTMFSOURCE_H_*/


More information about the live-devel mailing list