Change outputFd, which was unused, to stream_id: the stream index to match when getting packets from the packetqueue. Add more checks of zm_terminate. Test AV_CODEC_ID instead of V4L2 PIX_FMT to determine the stream type.

Isaac Connor 2021-01-07 09:46:06 -05:00
parent abc402878b
commit 5cca440bee
4 changed files with 32 additions and 21 deletions
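
For orientation, a minimal self-contained sketch (not the ZoneMinder code; Packet and next_for_stream are hypothetical stand-ins) of the packet-filtering idea the getNextFrame() change below introduces: skip queue entries whose stream_index does not match the stream this source serves.

    #include <list>

    // Hypothetical stand-in for ZMPacket/AVPacket, just enough to show the loop shape.
    struct Packet { int stream_index; };

    // Advance past packets belonging to other streams (e.g. audio packets when
    // this source serves the video stream), mirroring the new while-loop below.
    Packet *next_for_stream(std::list<Packet> &queue,
                            std::list<Packet>::iterator &it,
                            int stream_id) {
      while ( it != queue.end() && it->stream_index != stream_id )
        ++it;  // plays the role of m_packetqueue->increment_it(it, stream_id)
      return (it == queue.end()) ? nullptr : &*it;
    }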

View File

@@ -36,7 +36,7 @@ int ZoneMinderDeviceSource::Stats::notify(int tv_sec, int framesize) {
 ZoneMinderDeviceSource::ZoneMinderDeviceSource(
     UsageEnvironment& env,
     Monitor* monitor,
-    int outputFd,
+    int stream_id,
     unsigned int queueSize,
     bool useThread) :
   FramedSource(env),
@@ -45,7 +45,7 @@ ZoneMinderDeviceSource::ZoneMinderDeviceSource(
   packetBufferPtr(nullptr),
   m_in("in"),
   m_out("out") ,
-  m_outfd(outputFd),
+  m_stream_id(stream_id),
   m_monitor(monitor),
   m_packetqueue(nullptr),
   m_packetqueue_it(nullptr),
@@ -155,21 +155,30 @@ void ZoneMinderDeviceSource::incomingPacketHandler() {
 // read from monitor
 int ZoneMinderDeviceSource::getNextFrame() {
+  if ( zm_terminate )
+    return -1;
   timeval ref;
   gettimeofday(&ref, nullptr);

   if ( !m_packetqueue_it ) {
     m_packetqueue_it = m_packetqueue->get_video_it(true);
+    return -1;
   }

   ZMPacket *zm_packet = m_packetqueue->get_packet(m_packetqueue_it);
+  while ( zm_packet and (zm_packet->packet.stream_index != m_stream_id) ) {
+    zm_packet->unlock();
+    // We want our stream to start at the same it (iterator position) as the video,
+    // but if this is an audio stream we need to increment past that first packet
+    m_packetqueue->increment_it(m_packetqueue_it, m_stream_id);
+    zm_packet = m_packetqueue->get_packet(m_packetqueue_it);
+  }
   if ( !zm_packet ) {
     Debug(1, "null zm_packet %p", zm_packet);
     return -1;
   }
   // packet is locked
   AVPacket pkt = zm_packet->packet;
-  m_packetqueue->increment_it(m_packetqueue_it);
+  m_packetqueue->increment_it(m_packetqueue_it, m_stream_id);

   if ( !packetBufferSize ) {
     packetBufferSize = pkt.size * 2;
@@ -205,6 +214,7 @@ int ZoneMinderDeviceSource::getNextFrame() {
   memcpy(packetBufferPtr, pkt.data, pkt.size);
   packetBufferPtr += pkt.size;
   zm_packet->unlock();
+  zm_packet = nullptr;  // we no longer have the lock so shouldn't be accessing it

   size_t frame_size;
   size_t pkt_size = packetBufferPtr-packetBuffer;
@@ -223,7 +233,6 @@ int ZoneMinderDeviceSource::getNextFrame() {
   Debug(1, "Have nal frame at %p size %d. Remaining pktsize %d", data, frame_size, pkt_size);
   NAL_Frame *frame = new NAL_Frame(data, frame_size, tv);
   //frame->check();
-  zm_packet->unlock();

   timeval diff;
   timersub(&tv, &ref, &diff);

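The extra zm_terminate test at the top of getNextFrame() is the usual cooperative-shutdown pattern; a minimal sketch, assuming (as ZoneMinder does, roughly) that zm_terminate is a global flag raised by the signal handlers:

    #include <csignal>

    // Assumption for this sketch: a signal handler raises the flag, so it is
    // declared volatile sig_atomic_t (ZoneMinder declares zm_terminate along these lines).
    volatile std::sig_atomic_t zm_terminate = 0;

    int get_next_frame_like() {
      if ( zm_terminate )
        return -1;  // bail out before blocking on the queue, as the diff above does
      // ... lock, copy and deliver the next packet ...
      return 0;
    }
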
View File

@@ -56,7 +56,7 @@ class ZoneMinderDeviceSource: public FramedSource {
     static ZoneMinderDeviceSource* createNew(
         UsageEnvironment& env,
         Monitor* monitor,
-        int outputFd,
+        int stream_id,
         unsigned int queueSize,
         bool useThread);
     std::string getAuxLine() { return m_auxLine; };
@@ -64,7 +64,7 @@ class ZoneMinderDeviceSource: public FramedSource {
     int getHeight() { return m_monitor->Height(); };
   protected:
-    ZoneMinderDeviceSource(UsageEnvironment& env, Monitor* monitor, int outputFd, unsigned int queueSize, bool useThread);
+    ZoneMinderDeviceSource(UsageEnvironment& env, Monitor* monitor, int stream_id, unsigned int queueSize, bool useThread);
     virtual ~ZoneMinderDeviceSource();
   protected:
@@ -95,7 +95,7 @@ class ZoneMinderDeviceSource: public FramedSource {
     Stats m_in;
     Stats m_out;
     EventTriggerId m_eventTriggerId;
-    int m_outfd;
+    int m_stream_id;
     Monitor* m_monitor;
     zm_packetqueue *m_packetqueue;
     std::list<ZMPacket *>::iterator *m_packetqueue_it;

View File

@@ -57,13 +57,12 @@ bool RTSPServerThread::stopped() const {
   return terminate ? true : false;
 } // end RTSPServerThread::stopped()

-void RTSPServerThread::addStream() {
+void RTSPServerThread::addStream(AVStream *stream) {
   if ( !rtspServer )
     return;

   int queueSize = 10;
   bool useThread = true;
-  int outfd = 0;
   bool repeatConfig = true;

   StreamReplicator* videoReplicator = nullptr;
@@ -71,7 +70,7 @@ void RTSPServerThread::addStream() {
   // We don't know which format we can support at this time.
   // Do we make it configurable, or wait until PrimeCapture to determine what is available
-  rtpFormat.assign(getRtpFormat(PIX_FMT_HEVC, false));
+  rtpFormat.assign(getRtpFormat(stream->codecpar->codec_id, false));
   Debug(1, "RTSP: format %s", rtpFormat.c_str());
   if ( rtpFormat.empty() ) {
     //LOG(ERROR) << "No Streaming format supported for device " << camera_name.c_str() << std::endl;
@@ -84,7 +83,7 @@ void RTSPServerThread::addStream() {
   FramedSource *source = nullptr;
   if ( rtpFormat == "video/H264" ) {
-    source = H264_ZoneMinderDeviceSource::createNew(*env, monitor, outfd, queueSize, useThread, repeatConfig, muxTS);
+    source = H264_ZoneMinderDeviceSource::createNew(*env, monitor, stream->index, queueSize, useThread, repeatConfig, muxTS);
 #if 0
     if ( muxTS ) {
       muxer->addNewVideoSource(source, 5);
@@ -92,7 +91,7 @@ void RTSPServerThread::addStream() {
     }
 #endif
   } else if ( rtpFormat == "video/H265" ) {
-    source = H265_ZoneMinderDeviceSource::createNew(*env, monitor, outfd, queueSize, useThread, repeatConfig, muxTS);
+    source = H265_ZoneMinderDeviceSource::createNew(*env, monitor, stream->index, queueSize, useThread, repeatConfig, muxTS);
 #if 0
     if ( muxTS ) {
       muxer->addNewVideoSource(source, 6);
@@ -156,18 +155,19 @@ int RTSPServerThread::addSession(
 // -----------------------------------------
 // convert V4L2 pix format to RTP mime
 // -----------------------------------------
-std::string RTSPServerThread::getRtpFormat(int format, bool muxTS) {
+std::string RTSPServerThread::getRtpFormat(AVCodecID codec_id, bool muxTS) {
   std::string rtpFormat;
   if (muxTS) {
     rtpFormat = "video/MP2T";
   } else {
-    switch(format) {
-    case PIX_FMT_HEVC : rtpFormat = "video/H265"; break;
-    case PIX_FMT_H264 : rtpFormat = "video/H264"; break;
+    switch ( codec_id ) {
+    case AV_CODEC_ID_H265 : rtpFormat = "video/H265"; break;
+    case AV_CODEC_ID_H264 : rtpFormat = "video/H264"; break;
     //case PIX_FMT_MJPEG: rtpFormat = "video/JPEG"; break;
     //case PIX_FMT_JPEG : rtpFormat = "video/JPEG"; break;
-    case PIX_FMT_VP8 : rtpFormat = "video/VP8" ; break;
-    case PIX_FMT_VP9 : rtpFormat = "video/VP9" ; break;
+    //case AV_PIX_FMT_VP8 : rtpFormat = "video/VP8" ; break;
+    //case AV_PIX_FMT_VP9 : rtpFormat = "video/VP9" ; break;
+    default: break;
     }
   }

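Stripped of the class plumbing, the new getRtpFormat() is a pure AVCodecID-to-MIME lookup. A compilable sketch against FFmpeg's codec IDs (AV_CODEC_ID_H265 is FFmpeg's alias for AV_CODEC_ID_HEVC, so either spelling works):

    extern "C" {
    #include <libavcodec/codec_id.h>
    }
    #include <string>

    std::string rtp_mime_for(AVCodecID codec_id, bool muxTS) {
      if ( muxTS )
        return "video/MP2T";  // MPEG-TS wraps any codec
      switch ( codec_id ) {
        case AV_CODEC_ID_H264: return "video/H264";
        case AV_CODEC_ID_HEVC: return "video/H265";
        default:               return "";  // empty string means "unsupported", as above
      }
    }
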
View File

@@ -11,6 +11,8 @@
 #include <BasicUsageEnvironment.hh>
 #include <RTSPServer.hh>
+#include <libavcodec/codec_id.h>
+#include <libavformat/avformat.h>

 class RTSPServerThread : public Thread {
   private:
@@ -26,12 +28,12 @@ class RTSPServerThread : public Thread {
   public:
     explicit RTSPServerThread(Monitor *);
     ~RTSPServerThread();
-    void addStream();
+    void addStream(AVStream *);
     int run();
     void stop();
     bool stopped() const;
   private:
-    std::string getRtpFormat(int format, bool muxTS);
+    std::string getRtpFormat(AVCodecID codec, bool muxTS);
     int addSession(
         const std::string & sessionName,
         const std::list<ServerMediaSubsession*> & subSession
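
A hypothetical call site (not part of this commit) illustrating why addStream() now takes an AVStream*: the caller iterates the capture context and registers each stream, and the device source later matches packets against stream->index.

    extern "C" {
    #include <libavformat/avformat.h>
    }

    // Assumes the RTSPServerThread declaration from the header above is in scope;
    // fmt_ctx would come from the monitor's camera/demuxer.
    void add_all_streams(RTSPServerThread &server, AVFormatContext *fmt_ctx) {
      for ( unsigned int i = 0; i < fmt_ctx->nb_streams; i++ )
        server.addStream(fmt_ctx->streams[i]);  // stream->index and codecpar are used inside
    }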