From 894ad11816cc3b562d7c6aff543e98cb973fe3ab Mon Sep 17 00:00:00 2001
From: Isaac Connor
Date: Thu, 7 Jan 2021 09:46:06 -0500
Subject: [PATCH] Change the unused outputFd argument to stream_id, the stream
 to pull packets for from the packetqueue. Add more checks of zm_terminate.
 Use AV_CODEC_ID instead of V4L2 PIX_FMT to determine the stream type

---
 src/zm_rtsp_server_device_source.cpp | 19 ++++++++++++++-----
 src/zm_rtsp_server_device_source.h   |  6 +++---
 src/zm_rtsp_server_thread.cpp        | 22 +++++++++++-----------
 src/zm_rtsp_server_thread.h          |  6 ++++--
 4 files changed, 32 insertions(+), 21 deletions(-)
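Note: the sketch below is illustration only and is not part of the diff. It shows
the idea behind the getNextFrame() change: each RTSP substream now filters the
shared packetqueue by AVPacket.stream_index (its stream_id) instead of being
handed an output fd. Packet, PacketQueue and next_for_stream() are simplified,
hypothetical stand-ins, not ZoneMinder's ZMPacket/zm_packetqueue API.

// Minimal, self-contained sketch of per-stream filtering over a shared queue.
// The real code walks a locked iterator; this version pops for brevity.
#include <deque>
#include <iostream>
#include <optional>

struct Packet {
  int stream_index;  // mirrors AVPacket.stream_index
  int64_t pts;
};

class PacketQueue {
 public:
  void push(const Packet &p) { q_.push_back(p); }

  // Return the next packet belonging to stream_id, skipping packets from
  // other streams (e.g. audio packets when serving the video substream).
  std::optional<Packet> next_for_stream(int stream_id) {
    while (!q_.empty()) {
      Packet p = q_.front();
      q_.pop_front();
      if (p.stream_index == stream_id)
        return p;
      // Not ours: step past it and keep looking, as the new while loop does.
    }
    return std::nullopt;  // queue drained; the caller would return -1
  }

 private:
  std::deque<Packet> q_;
};

int main() {
  PacketQueue q;
  q.push({0, 100});  // video
  q.push({1, 101});  // audio
  q.push({0, 102});  // video

  // A video substream with stream_id 0 only ever sees stream 0 packets.
  while (auto p = q.next_for_stream(0))
    std::cout << "video packet pts=" << p->pts << "\n";
  return 0;
}
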
Remaining pktsize %d", data, frame_size, pkt_size); NAL_Frame *frame = new NAL_Frame(data, frame_size, tv); //frame->check(); - zm_packet->unlock(); timeval diff; timersub(&tv, &ref, &diff); diff --git a/src/zm_rtsp_server_device_source.h b/src/zm_rtsp_server_device_source.h index 595f7b1bb..1568e2e91 100644 --- a/src/zm_rtsp_server_device_source.h +++ b/src/zm_rtsp_server_device_source.h @@ -56,7 +56,7 @@ class ZoneMinderDeviceSource: public FramedSource { static ZoneMinderDeviceSource* createNew( UsageEnvironment& env, Monitor* monitor, - int outputFd, + int stream_id, unsigned int queueSize, bool useThread); std::string getAuxLine() { return m_auxLine; }; @@ -64,7 +64,7 @@ class ZoneMinderDeviceSource: public FramedSource { int getHeight() { return m_monitor->Height(); }; protected: - ZoneMinderDeviceSource(UsageEnvironment& env, Monitor* monitor, int outputFd, unsigned int queueSize, bool useThread); + ZoneMinderDeviceSource(UsageEnvironment& env, Monitor* monitor, int stream_id, unsigned int queueSize, bool useThread); virtual ~ZoneMinderDeviceSource(); protected: @@ -95,7 +95,7 @@ class ZoneMinderDeviceSource: public FramedSource { Stats m_in; Stats m_out; EventTriggerId m_eventTriggerId; - int m_outfd; + int m_stream_id; Monitor* m_monitor; zm_packetqueue *m_packetqueue; std::list::iterator *m_packetqueue_it; diff --git a/src/zm_rtsp_server_thread.cpp b/src/zm_rtsp_server_thread.cpp index 76da1d231..bb3ce5d6e 100644 --- a/src/zm_rtsp_server_thread.cpp +++ b/src/zm_rtsp_server_thread.cpp @@ -57,13 +57,12 @@ bool RTSPServerThread::stopped() const { return terminate ? true : false; } // end RTSPServerThread::stopped() -void RTSPServerThread::addStream() { +void RTSPServerThread::addStream(AVStream *stream) { if ( !rtspServer ) return; int queueSize = 10; bool useThread = true; - int outfd = 0; bool repeatConfig = true; StreamReplicator* videoReplicator = nullptr; @@ -71,7 +70,7 @@ void RTSPServerThread::addStream() { // We don't know which format we can support at this time. 
   // Do we make it configurable, or wait until PrimeCapture to determine what is available
-  rtpFormat.assign(getRtpFormat(PIX_FMT_HEVC, false));
+  rtpFormat.assign(getRtpFormat(stream->codecpar->codec_id, false));
   Debug(1, "RTSP: format %s", rtpFormat.c_str());
   if ( rtpFormat.empty() ) {
     //LOG(ERROR) << "No Streaming format supported for device " << camera_name.c_str() << std::endl;
@@ -84,7 +83,7 @@
   FramedSource *source = nullptr;
   if ( rtpFormat == "video/H264" ) {
-    source = H264_ZoneMinderDeviceSource::createNew(*env, monitor, outfd, queueSize, useThread, repeatConfig, muxTS);
+    source = H264_ZoneMinderDeviceSource::createNew(*env, monitor, stream->index, queueSize, useThread, repeatConfig, muxTS);
 #if 0
     if ( muxTS ) {
       muxer->addNewVideoSource(source, 5);
       }
@@ -92,7 +91,7 @@
 #endif
   } else if ( rtpFormat == "video/H265" ) {
-    source = H265_ZoneMinderDeviceSource::createNew(*env, monitor, outfd, queueSize, useThread, repeatConfig, muxTS);
+    source = H265_ZoneMinderDeviceSource::createNew(*env, monitor, stream->index, queueSize, useThread, repeatConfig, muxTS);
 #if 0
     if ( muxTS ) {
       muxer->addNewVideoSource(source, 6);
       }
 #endif
@@ -156,18 +155,19 @@ int RTSPServerThread::addSession(
 // -----------------------------------------
 // convert V4L2 pix format to RTP mime
 // -----------------------------------------
-std::string RTSPServerThread::getRtpFormat(int format, bool muxTS) {
+std::string RTSPServerThread::getRtpFormat(AVCodecID codec_id, bool muxTS) {
   std::string rtpFormat;
 
   if (muxTS) {
     rtpFormat = "video/MP2T";
   } else {
-    switch(format) {
-      case PIX_FMT_HEVC : rtpFormat = "video/H265"; break;
-      case PIX_FMT_H264 : rtpFormat = "video/H264"; break;
+    switch ( codec_id ) {
+      case AV_CODEC_ID_H265 : rtpFormat = "video/H265"; break;
+      case AV_CODEC_ID_H264 : rtpFormat = "video/H264"; break;
       //case PIX_FMT_MJPEG: rtpFormat = "video/JPEG"; break;
       //case PIX_FMT_JPEG : rtpFormat = "video/JPEG"; break;
-      case PIX_FMT_VP8 : rtpFormat = "video/VP8" ; break;
-      case PIX_FMT_VP9 : rtpFormat = "video/VP9" ; break;
+      //case AV_PIX_FMT_VP8 : rtpFormat = "video/VP8" ; break;
+      //case AV_PIX_FMT_VP9 : rtpFormat = "video/VP9" ; break;
+      default: break;
     }
   }
diff --git a/src/zm_rtsp_server_thread.h b/src/zm_rtsp_server_thread.h
index addd84245..347c7a248 100644
--- a/src/zm_rtsp_server_thread.h
+++ b/src/zm_rtsp_server_thread.h
@@ -11,6 +11,8 @@
 #include
 #include
+#include
+#include
 
 class RTSPServerThread : public Thread {
   private:
@@ -26,12 +28,12 @@ class RTSPServerThread : public Thread {
   public:
     explicit RTSPServerThread(Monitor *);
     ~RTSPServerThread();
-    void addStream();
+    void addStream(AVStream *);
     int run();
     void stop();
     bool stopped() const;
   private:
-    std::string getRtpFormat(int format, bool muxTS);
+    std::string getRtpFormat(AVCodecID codec, bool muxTS);
     int addSession(
         const std::string & sessionName,
         const std::list & subSession
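Note: the following sketch is illustration only, not part of the diff above. It
shows the same AVCodecID-to-RTP-MIME mapping idea that the patched getRtpFormat()
uses; rtp_format_for() is a made-up standalone name, and only FFmpeg's libavcodec
header is assumed.

// Sketch of mapping an FFmpeg codec id to an RTP MIME type, standalone.
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <cstdio>
#include <string>

static std::string rtp_format_for(AVCodecID codec_id, bool muxTS) {
  if (muxTS)
    return "video/MP2T";  // everything wrapped in an MPEG-TS mux
  switch (codec_id) {
    case AV_CODEC_ID_H264: return "video/H264";
    case AV_CODEC_ID_HEVC: return "video/H265";  // AV_CODEC_ID_H265 is an alias for AV_CODEC_ID_HEVC
    default:               return "";            // unsupported: caller must check for empty()
  }
}

int main() {
  // With an AVStream *stream (e.g. after avformat_find_stream_info()), the
  // lookup key would be stream->codecpar->codec_id, as in addStream(AVStream *).
  std::printf("%s\n", rtp_format_for(AV_CODEC_ID_H264, false).c_str());  // video/H264
  std::printf("%s\n", rtp_format_for(AV_CODEC_ID_HEVC, false).c_str());  // video/H265
  std::printf("%s\n", rtp_format_for(AV_CODEC_ID_H264, true).c_str());   // video/MP2T
  return 0;
}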