[Live-devel] about DeviceSource for multi channel encoder

reply2010 reply2010 at yeah.net
Fri Jul 6 08:30:05 PDT 2012


hi,experts

i modify DeviceSource.cpp to stream a live encoder,but i find triggerEvent could not trigger
deliverFrame0 function which is registered by eventTriggerId = env.taskScheduler().createEventTrigger(deliverFrame0);
what is wrong with me?
because my encoder has multi channel,i have to modify static variable EventTriggerId to non-static
here is my code
//***********************************************************
#include "DeviceSource.hh"
#include <GroupsockHelper.hh> // for "gettimeofday()"

DeviceSource*
DeviceSource::createNew(UsageEnvironment& env /*,
   DeviceParameters params*/) {
  return new DeviceSource(env/*, params*/);
}
//for non-static eventtriggerid
//EventTriggerId DeviceSource::eventTriggerId = 0;
//unsigned DeviceSource::referenceCount = 0;


DeviceSource::DeviceSource(UsageEnvironment& env/*,
      DeviceParameters params*/)
  : FramedSource(env)/*, fParams(params)*/ {
//****************************************
/* 
 if (referenceCount == 0) {
    // Any global initialization of the device would be done here:
    //%%% TO BE WRITTEN %%%
  }
  ++referenceCount;
*/
//***********************************

  // Any instance-specific initialization of the device would be done here:
  //%%% TO BE WRITTEN %%%

  // We arrange here for our "deliverFrame" member function to be called
  // whenever the next frame of data becomes available from the device.
  //
  // If the device can be accessed as a readable socket, then one easy way to do this is using a call to
  //     envir().taskScheduler().turnOnBackgroundReadHandling( ... )
  // (See examples of this call in the "liveMedia" directory.)
  //
  // If, however, the device *cannot* be accessed as a readable socket, then instead we can implement it using 'event triggers':
  // Create an 'event trigger' for this device (if it hasn't already been done):
  if (eventTriggerId == 0) {
    eventTriggerId = env.taskScheduler().createEventTrigger(deliverFrame0);
  }
}

DeviceSource::~DeviceSource() {
  // Any instance-specific 'destruction' (i.e., resetting) of the device would be done here:
  //%%% TO BE WRITTEN %%%

  //*****************************
  /*--referenceCount;
  if (referenceCount == 0) {*/
    // Any global 'destruction' (i.e., resetting) of the device would be done here:
    //%%% TO BE WRITTEN %%%

    // Reclaim our 'event trigger'
    envir().taskScheduler().deleteEventTrigger(eventTriggerId);
    eventTriggerId = 0;
 //************************************* }
}

void DeviceSource::doGetNextFrame() {

}

void DeviceSource::deliverFrame0(void* clientData) {

 ((DeviceSource*)clientData)->deliverFrame();
  return;
}

void DeviceSource::deliverFrame() {
  // This function is called when new frame data is available from the device.
  // We deliver this data by copying it to the 'downstream' object, using the following parameters (class members):
  // 'in' parameters (these should *not* be modified by this function):
  //     fTo: The frame data is copied to this address.
  //         (Note that the variable "fTo" is *not* modified.  Instead,
  //          the frame data is copied to the address pointed to by "fTo".)
  //     fMaxSize: This is the maximum number of bytes that can be copied
  //         (If the actual frame is larger than this, then it should
  //          be truncated, and "fNumTruncatedBytes" set accordingly.)
  // 'out' parameters (these are modified by this function):
  //     fFrameSize: Should be set to the delivered frame size (<= fMaxSize).
  //     fNumTruncatedBytes: Should be set iff the delivered frame would have been
  //         bigger than "fMaxSize", in which case it's set to the number of bytes
  //         that have been omitted.
  //     fPresentationTime: Should be set to the frame's presentation time
  //         (seconds, microseconds).  This time must be aligned with 'wall-clock time' - i.e., the time that you would get
  //         by calling "gettimeofday()".
  //     fDurationInMicroseconds: Should be set to the frame's duration, if known.
  //         If, however, the device is a 'live source' (e.g., encoded from a camera or microphone), then we probably don't need
  //         to set this variable, because - in this case - data will never arrive 'early'.
  // Note the code below.
 
//***************************************************
  if (!isCurrentlyAwaitingData()) {return;} // we're not ready for the data yet

  // Deliver the data here:
  if (newFrameSize > fMaxSize) {
    fFrameSize = fMaxSize;
    fNumTruncatedBytes = newFrameSize - fMaxSize;
  } else {
    fFrameSize = newFrameSize;
  }
  //gettimeofday(&fPresentationTime, NULL); // If you have a more accurate time - e.g., from an encoder - then use that instead.
  // If the device is *not* a 'live source' (e.g., it comes instead from a file or buffer), then set "fDurationInMicroseconds" here.
  memmove(fTo, newFrameDataStart, fFrameSize);
  requestData = true;
  // After delivering the data, inform the reader that it is now available:
  FramedSource::afterGetting(this);
//***************************************************
}
void DeviceSource::signalNewFrameData(UsageEnvironment* ourenv,DeviceSource* ourDevice) {
  //TaskScheduler ourScheduler ;
  //ourScheduler = env->taskScheduler(); //%%% TO BE WRITTEN %%%
  //DeviceSource* ourDevice  = NULL; //%%% TO BE WRITTEN %%%

  //if (ourScheduler != NULL) { // sanity check
    ourenv->taskScheduler().triggerEvent(ourDevice->eventTriggerId, ourDevice);
  //}
}


here is DeviceSource.hh

/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
**********/
// "liveMedia"
// Copyright (c) 1996-2011 Live Networks, Inc.  All rights reserved.
// A template for a MediaSource encapsulating an audio/video input device
//
// NOTE: Sections of this code labeled "%%% TO BE WRITTEN %%%" are incomplete, and needto be written by the programmer
// (depending on the features of the particulardevice).
// C++ header

#ifndef _DEVICE_SOURCE_HH
#define _DEVICE_SOURCE_HH

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif

// The following class can be used to define specific encoder parameters
/*class DeviceParameters {
  //%%% TO BE WRITTEN %%%
};*/

class DeviceSource: public FramedSource {
public:
  static DeviceSource* createNew(UsageEnvironment& env /*,
     DeviceParameters params*/);

public:
  //static EventTriggerId eventTriggerId;
 EventTriggerId eventTriggerId; //here change static into non-static
protected:
  DeviceSource(UsageEnvironment& env /*, DeviceParameters params*/);
  // called only by createNew(), or by subclass constructors
  virtual ~DeviceSource();

private:
  // redefined virtual functions:
  virtual void doGetNextFrame();

private:
  static void deliverFrame0(void* clientData);
public:
  void signalNewFrameData(UsageEnvironment* env,DeviceSource* ourDevice);//new func added by me
public:
  void deliverFrame();

private:
  //static unsigned referenceCount; // used to count how many instances of this class currently exist
  //DeviceParameters fParams;
public:
   bool requestData;
public:
  u_int8_t* newFrameDataStart;
  unsigned newFrameSize;
};

#endif

 

at last,here is my main code part

note that DVRChSource[i]'s type is DeviceSource *
......
     if(DVRChSource[i]->requestData){
 
      DVRChSource[i]->newFrameDataStart=(u_int8_t*)ChannelBuffer[i];
      DVRChSource[i]->newFrameSize=ChannelBufferSize[i];
      
      DVRChSource[i]->requestData=false;
      //env->taskScheduler().triggerEvent(DVRChSource[i]->eventTriggerId, DVRChSource[i]); 
      
      DVRChSource[i]->signalNewFrameData(env,DVRChSource[i]);

     }
......

i debug and trace these code,i find deliverFrame0 could not be triggered
after ourenv->taskScheduler().triggerEvent(ourDevice->eventTriggerId, ourDevice); executed.
any light or any DeviceSource sample would be appreciated.thanks a lot.

jounin



-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.live555.com/pipermail/live-devel/attachments/20120706/e262f0f2/attachment-0001.html>


More information about the live-devel mailing list