live555 implements transport for H.264 and H.265 streams, but has no support for streaming MJPEG, so I implemented MJPEG streaming on top of live555. The code and a walkthrough follow.

1. Approach

According to a Q&A about JPEG streaming in the mailing list on the official site (http://www.live555.com/), streaming MJPEG requires subclassing JPEGVideoSource and implementing doGetNextFrame(), type(), qFactor(), width() and height() (plus quantizationTables(), since the code below sends the tables in-band).
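For reference, the accessors that JPEGVideoSource declares as pure virtual look roughly like this (paraphrased from live555's JPEGVideoSource.hh; check the header in your copy of the library):

class JPEGVideoSource: public FramedSource {
public:
  virtual u_int8_t type() = 0;
  virtual u_int8_t qFactor() = 0;
  virtual u_int8_t width() = 0;  // in units of 8 pixels
  virtual u_int8_t height() = 0; // in units of 8 pixels

  // optional; override when qFactor() reports in-band tables (Q >= 128):
  virtual u_int8_t const* quantizationTables(u_int8_t& precision,
                                             u_int16_t& length);
  // ...
};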


The demo in this article is modeled on testH265VideoStreamer.cpp.

The information needed by width() and height() is obtained by parsing the JPEG header. The JPEG file format itself is covered well in this post (https://blog.csdn.net/fengbingchun/article/details/103528160), so it is not repeated here; a minimal sketch of the parsing idea follows, and after that the full code.
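As a quick illustration of where the dimensions live inside a JPEG frame (a standalone sketch; parseJpegDimensions is a hypothetical helper, not part of live555 or of the demo below):

#include <cstdint>
#include <cstddef>

// Scan a complete JPEG frame for the SOF0 marker (FF C0) and read the
// image dimensions out of the frame header that follows it.
bool parseJpegDimensions(const uint8_t* buf, size_t len,
                         unsigned& widthPixels, unsigned& heightPixels) {
  for (size_t i = 0; i + 8 < len; ++i) {
    if (buf[i] == 0xFF && buf[i+1] == 0xC0) {
      // SOF0 layout: FF C0 | length(2) | precision(1) | height(2) | width(2) | ...
      heightPixels = (buf[i+5] << 8) | buf[i+6];
      widthPixels  = (buf[i+7] << 8) | buf[i+8];
      return true;
    }
  }
  return false;
}

MJPEGVideoSource::afterGettingFrame() below does the same scan, but stores the values already divided by 8 ((fTo[i+5]<<5)|(fTo[i+6]>>3) equals ((MSB<<8)|LSB)>>3), because RFC 2435 expresses width and height in units of 8 pixels.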

2. Code

If you would rather not read through it, the source archive is here: https://download.csdn.net/download/Di_Wong/12601695

(1) testMJPEGVideoStreamer.cpp

This is the demo file. Before using it, change the variable inputFileName to your own file and destinationAddress.s_addr to the IP of the machine where VLC runs (for example, to play the stream on my own PC I set it to my PC's IP).

The demo is modeled on testH265VideoStreamer.cpp and streams via multicast.


#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "GroupsockHelper.hh"
#include "MJPEGVideoSource.hh"

UsageEnvironment* env;
char const* inputFileName = "CrossGuateTL.mjpeg.avi";
MJPEGVideoSource* videoSource;
RTPSink* videoSink;

void play(); // forward

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;

  // change to your ip!!
  destinationAddress.s_addr = our_inet_addr("192.168.16.1");
  // destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'JPEG Video RTP' sink from the RTP 'groupsock':
  videoSink = JPEGVideoRTPSink::createNew(*env, &rtpGroupsock);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 5000; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp = RTCPInstance::createNew(*env, &rtcpGroupsock,
                                               estimatedSessionBandwidth, CNAME,
                                               videoSink, NULL /* we're a server */,
                                               True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "testStream", inputFileName,
                                                          "Session streamed by \"testMJPEGVideoStreamer\"",
                                                          True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}

void afterPlaying(void* /*clientData*/) {
  *env << "...done reading from file\n";
  videoSink->stopPlaying();
  Medium::close(videoSource);
  // Note that this also closes the input file that this source read from.

  // Start playing once again:
  play();
}

void play() {
  // Open the input file as a 'byte-stream file source':
  ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
         << "\" as a byte-stream file source\n";
    exit(1);
  }

  FramedSource* videoES = fileSource;

  // Create a framer for the Video Elementary Stream:
  videoSource = MJPEGVideoSource::createNew(*env, videoES);

  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

(2) MJPEGVideoSource.hh

#pragma once

// live header
#include "JPEGVideoSource.hh"

/*!*****************************************************************************
 * \brief MJPEG Video Source (RFC 2435)
*************************************************************************************/
class MJPEGVideoSource : public JPEGVideoSource
{
	public:
		static MJPEGVideoSource* createNew (UsageEnvironment& env, FramedSource* source)
		{
			return new MJPEGVideoSource(env,source);
		}
		
	public:
		virtual void doGetNextFrame();

		static void afterGettingFrameSub(void* clientData, unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds) 
		{
				MJPEGVideoSource* source = (MJPEGVideoSource*)clientData;
   				source->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
		}		
		void afterGettingFrame(unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds);

		virtual u_int8_t type()    { return 1; }          // RFC 2435 type 1: 4:2:0 chroma subsampling
		virtual u_int8_t qFactor() { return 128; }        // Q >= 128: quantization tables sent in-band
		virtual u_int8_t width()   { return m_width; }    // in units of 8 pixels (RFC 2435)
		virtual u_int8_t height()  { return m_height; }   // in units of 8 pixels (RFC 2435)
		u_int8_t const* quantizationTables( u_int8_t& precision, u_int16_t& length );
		
	protected:
		MJPEGVideoSource(UsageEnvironment& env, FramedSource* source);
		virtual ~MJPEGVideoSource();

	protected:
		FramedSource* m_inputSource;
		u_int8_t      m_width;
		u_int8_t      m_height;
		u_int8_t      m_qTable[128];
		bool          m_qTable0Init;
		bool          m_qTable1Init;
};
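Note that width() and height() are only u_int8_t: RFC 2435 carries the dimensions as single bytes of its 8-byte RTP/JPEG main header, in units of 8 pixels, so the largest dimension that can be signalled this way is 2040 pixels. The main header layout, from the RFC:

 0                   1                   2                   3
 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Type-specific |              Fragment Offset                  |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|      Type     |       Q       |     Width     |     Height    |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

JPEGVideoRTPSink fills in Type, Q, Width and Height from the four accessors above.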

(3) MJPEGVideoSource.cpp

// project header
#include "MJPEGVideoSource.hh"

#include <cstring> // for memmove()/memcpy()

/*!*****************************************************************************
 * \brief Constructor
*************************************************************************************/
MJPEGVideoSource::MJPEGVideoSource(UsageEnvironment& env, FramedSource* source) : 
		JPEGVideoSource(env), 
		m_inputSource(source), 
		m_width(0), 
		m_height(0), 
		m_qTable0Init(false), 
		m_qTable1Init(false) 
{
}

/*!*****************************************************************************
 * \brief Destructor
*************************************************************************************/
MJPEGVideoSource::~MJPEGVideoSource()
{
	if (m_inputSource)
	{
		Medium::close(m_inputSource);
	}
}

/*!*****************************************************************************
 * \brief Ask for a frame from the source
*************************************************************************************/
void MJPEGVideoSource::doGetNextFrame() 
{
	if (m_inputSource)
	{
		m_inputSource->getNextFrame(fTo, fMaxSize, afterGettingFrameSub, this, FramedSource::handleClosure, this);			
	}
}

/*!*****************************************************************************
 * \brief source callback
*************************************************************************************/
void MJPEGVideoSource::afterGettingFrame(unsigned frameSize,unsigned numTruncatedBytes,struct timeval presentationTime,unsigned durationInMicroseconds) 
{
	int headerSize = 0;
	bool headerOk = false;

	for (unsigned int i = 0; i < frameSize ; ++i) 
	{
		// SOF0 (FF C0): marker(2) | length(2) | precision(1) | height(2) | width(2) | ...
		// (MSB<<5)|(LSB>>3) == ((MSB<<8)|LSB)>>3: store the dimensions already
		// divided by 8, since RFC 2435 expresses them in 8-pixel units
		if ( (i+8) < frameSize  && fTo[i] == 0xFF && fTo[i+1] == 0xC0 ) 
		{
			 m_height = (fTo[i+5]<<5)|(fTo[i+6]>>3);
			 m_width = (fTo[i+7]<<5)|(fTo[i+8]>>3);
		}
		// DQT (FF DB): fTo[i+4] is the Pq/Tq byte; 0x00/0x01 means an 8-bit
		// precision table with id 0/1, followed by 64 bytes of table data
		if ( (i+5+64) < frameSize && fTo[i] == 0xFF && fTo[i+1] == 0xDB) 
		{
			if (fTo[i+4] ==0)
			{
				memcpy(m_qTable, fTo + i + 5, 64);
				m_qTable0Init = true;
			}
			else if (fTo[i+4] ==1)
			{
				memcpy(m_qTable + 64, fTo + i + 5, 64);
				m_qTable1Init = true;
			}
		}
		// End of header (a heuristic): 3F 00 are the Se and Ah/Al bytes that
		// close the SOS segment, so i+2 is the first byte of entropy-coded data
		if ( (i+1) < frameSize && fTo[i] == 0x3F && fTo[i+1] == 0x00 ) 
		{
			 headerOk = true;
			 headerSize = i+2;
			 break;
		}
	}

	// RFC 2435 packets must not carry the JPEG headers, so strip everything up
	// to the scan data; if no header was recognized, deliver an empty frame
	fFrameSize = 0;
	if (headerOk)
	{
		fFrameSize = frameSize - headerSize;
		memmove( fTo, fTo + headerSize, fFrameSize ); // regions overlap: memmove, not memcpy
	}
	
	fNumTruncatedBytes = numTruncatedBytes;
	fPresentationTime = presentationTime;
	fDurationInMicroseconds = durationInMicroseconds;
	afterGetting(this);
}

/*!*****************************************************************************
 * \brief return quantization tables
*************************************************************************************/
u_int8_t const* MJPEGVideoSource::quantizationTables( u_int8_t& precision, u_int16_t& length )
{
	length = 0;
	precision = 0;
	// report the tables only once both have been parsed from the stream;
	// until then length stays 0 and the sink sends no table data
	if ( m_qTable0Init && m_qTable1Init )
	{
		precision = 8;
		length = sizeof(m_qTable);
	}
	return m_qTable;
}
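A note on how these pieces fit together: in RFC 2435 a Q value of 128-255 tells the receiver that the quantization tables are not derived from Q but travel in-band, in a quantization table header carried in the first packet of each frame. That is why qFactor() returns 128 above, and why quantizationTables() must expose the DQT data parsed in afterGettingFrame().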

3. Usage

Run testMJPEGVideoStreamer and open the printed play URL in a VLC client.
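With the RTSP server listening on port 8554 and the stream named "testStream" (both set in the demo code), the printed URL has the form rtsp://<server-ip>:8554/testStream; in VLC, paste it under Media → Open Network Stream. Remember that the RTP packets are sent to the destinationAddress configured in the demo, so make sure it points at the machine running VLC.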

Original article: https://blog.csdn.net/Di_Wong/article/details/107284635