[Live-devel] AMR timestamps

David Betrand bidibulle at operamail.com
Mon Mar 5 07:59:11 PST 2007


OK, sorry about that. Here is the patch regenerated as a proper diff file. I also
fixed a small bug in the MIMEtype() method, which always returned the string
"audio/AMR-WB" regardless of the AMR type (narrowband or wideband).
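
For reference, the corrected MIMEtype() simply picks the MIME type from the
existing fIsWideband flag; this is the same one-line change as in the attached
patch:

  char const* RawAMRRTPSource::MIMEtype() const {
    // Report the MIME type that matches the actual AMR variant,
    // instead of always claiming wideband:
    return fIsWideband ? "audio/AMR-WB" : "audio/AMR-NB";
  }
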
David
> ----- Original Message -----
> From: "Ross Finlayson" <finlayson at live555.com>
> To: "LIVE555 Streaming Media - development & use" <live-devel at ns.live555.com>
> Subject: Re: [Live-devel] AMR timestamps
> Date: Mon, 5 Mar 2007 07:22:24 -0800
>
>
> Thanks, but please resend this as a proper patch file - e.g., generated using
> 	diff -c -B -b AMRAudioRTPSource.cpp.orig AMRAudioRTPSource.cpp
> --
>
> Ross Finlayson
> Live Networks, Inc.
> http://www.live555.com/
> _______________________________________________
> live-devel mailing list
> live-devel at lists.live555.com
> http://lists.live555.com/mailman/listinfo/live-devel



-------------- next part --------------
*** AMRAudioRTPSource.cpp.orig	2007-03-05 16:15:43.000000000 +0100
--- AMRAudioRTPSource.cpp	2007-03-05 16:30:31.000000000 +0100
***************
*** 44,49 ****
--- 44,51 ----
    unsigned char* TOC() const { return fTOC; } // FT+Q value for each TOC entry
    unsigned& frameIndex() { return fFrameIndex; } // index of frame-block within pkt
  
+   Boolean fIsSynchronized;
+ 
  private:
    RawAMRRTPSource(UsageEnvironment& env, Groupsock* RTPgs,
  		  unsigned char rtpPayloadFormat,
***************
*** 96,104 ****
    virtual void doStopGettingFrames();
  
  private:
!   FramedSource* fInputSource;
    class AMRDeinterleavingBuffer* fDeinterleavingBuffer;
    Boolean fNeedAFrame;
  };
  
  
--- 98,107 ----
    virtual void doStopGettingFrames();
  
  private:
!   RawAMRRTPSource* fInputSource;
    class AMRDeinterleavingBuffer* fDeinterleavingBuffer;
    Boolean fNeedAFrame;
+   
  };
  
  
***************
*** 214,220 ****
    fIsWideband(isWideband), fIsOctetAligned(isOctetAligned),
    fIsInterleaved(isInterleaved), fCRCsArePresent(CRCsArePresent),
    fILL(0), fILP(0), fTOCSize(0), fTOC(NULL), fFrameIndex(0),
!   fNumSuccessiveSyncedPackets(0) {
  }
  
  RawAMRRTPSource::~RawAMRRTPSource() {
--- 217,223 ----
      fIsWideband(isWideband), fIsOctetAligned(isOctetAligned),
      fIsInterleaved(isInterleaved), fCRCsArePresent(CRCsArePresent),
      fILL(0), fILP(0), fTOCSize(0), fTOC(NULL), fFrameIndex(0),
!     fNumSuccessiveSyncedPackets(0), fIsSynchronized(false) {
  }
  
  RawAMRRTPSource::~RawAMRRTPSource() {
***************
*** 312,330 ****
  } 
  
  char const* RawAMRRTPSource::MIMEtype() const {
!   return fIsWideband ? "audio/AMR-WB" : "audio/AMR-WB";
  }
  
  Boolean RawAMRRTPSource::hasBeenSynchronizedUsingRTCP() {
!   // Don't report ourselves as being synchronized until we've received
!   // at least a complete interleave cycle of synchronized packets.
!   // This ensures that the receiver is currently getting a frame from
!   // a packet that was synchronized.
!   if (fNumSuccessiveSyncedPackets > (unsigned)(fILL+1)) {
!     fNumSuccessiveSyncedPackets = fILL + 2; // prevents overflow
!     return True;
!   }
!   return False;
  }
  
  
--- 315,325 ----
  } 
  
  char const* RawAMRRTPSource::MIMEtype() const {
!   return fIsWideband ? "audio/AMR-WB" : "audio/AMR-NB";
  }
  
  Boolean RawAMRRTPSource::hasBeenSynchronizedUsingRTCP() {
!   return fIsSynchronized;
  }
  
  
***************
*** 401,407 ****
    Boolean retrieveFrame(unsigned char* to, unsigned maxSize,
  			unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes,
  			u_int8_t& resultFrameHeader,
! 			struct timeval& resultPresentationTime);
  
    unsigned char* inputBuffer() { return fInputBuffer; }
    unsigned inputBufferSize() const { return AMR_MAX_FRAME_SIZE; }
--- 396,403 ----
    Boolean retrieveFrame(unsigned char* to, unsigned maxSize,
  			unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes,
  			u_int8_t& resultFrameHeader,
! 			struct timeval& resultPresentationTime, 
! 			Boolean& resultIsSynchronized);
  
    unsigned char* inputBuffer() { return fInputBuffer; }
    unsigned inputBufferSize() const { return AMR_MAX_FRAME_SIZE; }
***************
*** 418,423 ****
--- 414,421 ----
      unsigned char* frameData;
      u_int8_t frameHeader;
      struct timeval presentationTime;
+     
+     Boolean fIsSynchronized;
    };
  
    unsigned fNumChannels, fMaxInterleaveGroupSize;
***************
*** 439,446 ****
  ::createNew(UsageEnvironment& env,
  	    Boolean isWideband, unsigned numChannels, unsigned maxInterleaveGroupSize,
  	    RawAMRRTPSource* inputSource) {
!   return new AMRDeinterleaver(env, isWideband, numChannels, maxInterleaveGroupSize,
! 			      inputSource);
  }
  
  AMRDeinterleaver::AMRDeinterleaver(UsageEnvironment& env,
--- 437,443 ----
  ::createNew(UsageEnvironment& env,
  	    Boolean isWideband, unsigned numChannels, unsigned maxInterleaveGroupSize,
  	    RawAMRRTPSource* inputSource) {
!   return new AMRDeinterleaver(env, isWideband, numChannels, maxInterleaveGroupSize, inputSource);
  }
  
  AMRDeinterleaver::AMRDeinterleaver(UsageEnvironment& env,
***************
*** 464,470 ****
    // First, try getting a frame from the deinterleaving buffer:
    if (fDeinterleavingBuffer->retrieveFrame(fTo, fMaxSize,
  					   fFrameSize, fNumTruncatedBytes,
! 					   fLastFrameHeader, fPresentationTime)) {
      // Success!
      fNeedAFrame = False;
  
--- 462,470 ----
    // First, try getting a frame from the deinterleaving buffer:
    if (fDeinterleavingBuffer->retrieveFrame(fTo, fMaxSize,
  					   fFrameSize, fNumTruncatedBytes,
! 					   fLastFrameHeader, fPresentationTime, 
! 					   fInputSource->fIsSynchronized)) {
! 
      // Success!
      fNeedAFrame = False;
  
***************
*** 597,602 ****
--- 599,605 ----
    inBin.frameSize = frameSize;
    inBin.frameHeader = frameHeader;
    inBin.presentationTime = presentationTime;
+   inBin.fIsSynchronized = source->RTPSource::hasBeenSynchronizedUsingRTCP();
   
    if (curBuffer == NULL) curBuffer = createNewBuffer();
    fInputBuffer = curBuffer;
***************
*** 610,616 ****
  ::retrieveFrame(unsigned char* to, unsigned maxSize,
  		unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes,
  		u_int8_t& resultFrameHeader,
! 		struct timeval& resultPresentationTime) {
    if (fNextOutgoingBin >= fOutgoingBinMax) return False; // none left
  
    FrameDescriptor& outBin = fFrames[fIncomingBankId^1][fNextOutgoingBin];
--- 613,621 ----
  ::retrieveFrame(unsigned char* to, unsigned maxSize,
  		unsigned& resultFrameSize, unsigned& resultNumTruncatedBytes,
  		u_int8_t& resultFrameHeader,
! 		struct timeval& resultPresentationTime,
! 		Boolean& resultIsSynchronized) {
! 
    if (fNextOutgoingBin >= fOutgoingBinMax) return False; // none left
  
    FrameDescriptor& outBin = fFrames[fIncomingBankId^1][fNextOutgoingBin];
***************
*** 617,622 ****
--- 623,629 ----
    unsigned char* fromPtr = outBin.frameData;
    unsigned char fromSize = outBin.frameSize;
    outBin.frameSize = 0; // for the next time this bin is used
+   resultIsSynchronized = outBin.fIsSynchronized;
  
    // Check whether this frame is missing; if so, return a FT_NO_DATA frame:
    if (fromSize == 0) {

