diff --git a/src/zm_remote_camera.cpp b/src/zm_remote_camera.cpp
index b85263798..99423fca1 100644
--- a/src/zm_remote_camera.cpp
+++ b/src/zm_remote_camera.cpp
@@ -44,59 +44,59 @@ RemoteCamera::RemoteCamera(
   path( p_path ),
   hp( 0 )
 {
-    if ( path[0] != '/' )
-        path = '/'+path;
+  if ( path[0] != '/' )
+    path = '/'+path;
 }
 
 RemoteCamera::~RemoteCamera()
 {
-    if(hp != NULL) {
-        freeaddrinfo(hp);
-        hp = NULL;
-    }
+  if(hp != NULL) {
+    freeaddrinfo(hp);
+    hp = NULL;
+  }
 }
 
 void RemoteCamera::Initialise()
 {
-    if( protocol.empty() )
-        Fatal( "No protocol specified for remote camera" );
+  if( protocol.empty() )
+    Fatal( "No protocol specified for remote camera" );
 
-    if( host.empty() )
-        Fatal( "No host specified for remote camera" );
+  if( host.empty() )
+    Fatal( "No host specified for remote camera" );
 
-    if( port.empty() )
-        Fatal( "No port specified for remote camera" );
+  if( port.empty() )
+    Fatal( "No port specified for remote camera" );
 
-    //if( path.empty() )
-        //Fatal( "No path specified for remote camera" );
+  //if( path.empty() )
+    //Fatal( "No path specified for remote camera" );
 
-    // Cache as much as we can to speed things up
-    std::string::size_type authIndex = host.rfind( '@' );
+  // Cache as much as we can to speed things up
+  std::string::size_type authIndex = host.rfind( '@' );
 
-    if ( authIndex != std::string::npos )
-    {
-        auth = host.substr( 0, authIndex );
-        host.erase( 0, authIndex+1 );
-        auth64 = base64Encode( auth );
+  if ( authIndex != std::string::npos )
+  {
+    auth = host.substr( 0, authIndex );
+    host.erase( 0, authIndex+1 );
+    auth64 = base64Encode( auth );
 
-        authIndex = auth.rfind( ':' );
-        username = auth.substr(0,authIndex);
-        password = auth.substr( authIndex+1, auth.length() );
+    authIndex = auth.rfind( ':' );
+    username = auth.substr(0,authIndex);
+    password = auth.substr( authIndex+1, auth.length() );
 
-    }
+  }
 
-    mNeedAuth = false;
-    mAuthenticator = new zm::Authenticator(username,password);
+  mNeedAuth = false;
+  mAuthenticator = new zm::Authenticator(username,password);
 
-    struct addrinfo hints;
-    memset(&hints, 0, sizeof(hints));
-    hints.ai_family = AF_UNSPEC;
-    hints.ai_socktype = SOCK_STREAM;
+  struct addrinfo hints;
+  memset(&hints, 0, sizeof(hints));
+  hints.ai_family = AF_UNSPEC;
+  hints.ai_socktype = SOCK_STREAM;
 
-    int ret = getaddrinfo(host.c_str(), port.c_str(), &hints, &hp);
-    if ( ret != 0 )
-    {
-        Fatal( "Can't getaddrinfo(%s port %s): %s", host.c_str(), port.c_str(), gai_strerror(ret) );
-    }
+  int ret = getaddrinfo(host.c_str(), port.c_str(), &hints, &hp);
+  if ( ret != 0 )
+  {
+    Fatal( "Can't getaddrinfo(%s port %s): %s", host.c_str(), port.c_str(), gai_strerror(ret) );
+  }
 }
 
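Note on the zm_remote_camera.cpp hunk above: RemoteCamera::Initialise() resolves the host with getaddrinfo() and caches the result in hp, and ~RemoteCamera() releases it with freeaddrinfo(). A minimal standalone sketch of that resolve/free pairing follows; the host name and port are placeholders, not values from the patch.

    // Sketch only: the getaddrinfo()/freeaddrinfo() lifetime that Initialise()
    // and the destructor rely on. "camera.example.com" and "554" are made up.
    #include <cstdio>
    #include <cstring>
    #include <netdb.h>
    #include <sys/socket.h>

    int main() {
      struct addrinfo hints;
      struct addrinfo *hp = nullptr;
      memset(&hints, 0, sizeof(hints));
      hints.ai_family = AF_UNSPEC;     // accept IPv4 or IPv6
      hints.ai_socktype = SOCK_STREAM; // TCP

      int ret = getaddrinfo("camera.example.com", "554", &hints, &hp);
      if ( ret != 0 ) {
        fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(ret));
        return 1;
      }
      // ... connect using the entries chained from hp ...
      freeaddrinfo(hp); // mirrors the cleanup in ~RemoteCamera()
      return 0;
    }
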
diff --git a/src/zm_remote_camera_http.h b/src/zm_remote_camera_http.h
index 395ae3975..fe5823397 100644
--- a/src/zm_remote_camera_http.h
+++ b/src/zm_remote_camera_http.h
@@ -30,8 +30,7 @@
 // Class representing 'http' cameras, i.e. those which are
 // accessed over a network connection using http
 //
-class RemoteCameraHttp : public RemoteCamera
-{
+class RemoteCameraHttp : public RemoteCamera {
 protected:
   std::string request;
   struct timeval timeout;
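The RemoteCameraHttp declaration above keeps a struct timeval timeout member next to the request string; the hunk itself only moves the opening brace. As background, and not code from this patch, here is a generic sketch of the select()-with-timeout pattern such a member typically feeds; the descriptor name and the 5-second value are assumptions.

    // Generic POSIX sketch: wait for a socket to become readable with a timeout.
    #include <sys/select.h>
    #include <sys/time.h>

    // Returns >0 if 'sd' is readable before the timeout, 0 on timeout, -1 on error.
    int wait_readable(int sd) {
      fd_set rfds;
      FD_ZERO(&rfds);
      FD_SET(sd, &rfds);

      struct timeval timeout;
      timeout.tv_sec = 5;   // illustrative value only
      timeout.tv_usec = 0;

      // select() may modify 'timeout', so it is re-set before every call.
      return select(sd + 1, &rfds, nullptr, nullptr, &timeout);
    }
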
diff --git a/src/zm_remote_camera_rtsp.cpp b/src/zm_remote_camera_rtsp.cpp
index 5a9028aac..9b0b6b41d 100644
--- a/src/zm_remote_camera_rtsp.cpp
+++ b/src/zm_remote_camera_rtsp.cpp
@@ -45,8 +45,7 @@ RemoteCameraRtsp::RemoteCameraRtsp( unsigned int p_monitor_id, const std::string
   else
     Fatal( "Unrecognised method '%s' when creating RTSP camera %d", p_method.c_str(), monitor_id );
 
-  if ( capture )
-  {
+  if ( capture ) {
     Initialise();
   }
 
@@ -76,36 +75,30 @@ RemoteCameraRtsp::RemoteCameraRtsp( unsigned int p_monitor_id, const std::string
   } else {
     Panic("Unexpected colours: %d",colours);
   }
-
-}
+} // end RemoteCameraRtsp::RemoteCameraRtsp(...)
 
-RemoteCameraRtsp::~RemoteCameraRtsp()
-{
+RemoteCameraRtsp::~RemoteCameraRtsp() {
   av_frame_free( &mFrame );
   av_frame_free( &mRawFrame );
 
 #if HAVE_LIBSWSCALE
-  if ( mConvertContext )
-  {
+  if ( mConvertContext ) {
     sws_freeContext( mConvertContext );
     mConvertContext = NULL;
   }
 #endif
 
-  if ( mCodecContext )
-  {
+  if ( mCodecContext ) {
     avcodec_close( mCodecContext );
     mCodecContext = NULL; // Freed by avformat_free_context in the destructor of RtspThread class
   }
 
-  if ( capture )
-  {
+  if ( capture ) {
     Terminate();
   }
 }
 
-void RemoteCameraRtsp::Initialise()
-{
+void RemoteCameraRtsp::Initialise() {
   RemoteCamera::Initialise();
 
   int max_size = width*height*colours;
@@ -124,13 +117,11 @@ void RemoteCameraRtsp::Initialise()
   Connect();
 }
 
-void RemoteCameraRtsp::Terminate()
-{
+void RemoteCameraRtsp::Terminate() {
   Disconnect();
 }
 
-int RemoteCameraRtsp::Connect()
-{
+int RemoteCameraRtsp::Connect() {
   rtspThread = new RtspThread( monitor_id, method, protocol, host, port, path, auth, rtsp_describe );
 
   rtspThread->start();
@@ -138,10 +129,8 @@ int RemoteCameraRtsp::Connect()
   return( 0 );
 }
 
-int RemoteCameraRtsp::Disconnect()
-{
-  if ( rtspThread )
-  {
+int RemoteCameraRtsp::Disconnect() {
+  if ( rtspThread ) {
     rtspThread->stop();
     rtspThread->join();
     delete rtspThread;
@@ -150,11 +139,9 @@ int RemoteCameraRtsp::Disconnect()
   return( 0 );
 }
 
-int RemoteCameraRtsp::PrimeCapture()
-{
+int RemoteCameraRtsp::PrimeCapture() {
   Debug( 2, "Waiting for sources" );
-  for ( int i = 0; i < 100 && !rtspThread->hasSources(); i++ )
-  {
+  for ( int i = 0; i < 100 && !rtspThread->hasSources(); i++ ) {
     usleep( 100000 );
   }
   if ( !rtspThread->hasSources() )
@@ -241,7 +228,7 @@ int RemoteCameraRtsp::PrimeCapture()
   int pSize = avpicture_get_size( imagePixFormat, width, height );
 #endif
 
-  if( (unsigned int)pSize != imagesize) {
+  if ( (unsigned int)pSize != imagesize ) {
     Fatal("Image size mismatch. Required: %d Available: %d",pSize,imagesize);
   }
 /*
@@ -265,8 +252,7 @@ int RemoteCameraRtsp::PrimeCapture()
 int RemoteCameraRtsp::PreCapture() {
   if ( !rtspThread->isRunning() )
     return( -1 );
-  if ( !rtspThread->hasSources() )
-  {
+  if ( !rtspThread->hasSources() ) {
     Error( "Cannot precapture, no RTP sources" );
     return( -1 );
   }
@@ -303,25 +289,20 @@ int RemoteCameraRtsp::Capture( Image &image ) {
         int nalType = (buffer.head()[3] & 0x1f);
 
         // SPS The SPS NAL unit contains parameters that apply to a series of consecutive coded video pictures
-        if(nalType == 7)
-        {
+        if(nalType == 7) {
           lastSps = buffer;
           continue;
-        }
+        } else if(nalType == 8) {
         // PPS The PPS NAL unit contains parameters that apply to the decoding of one or more individual pictures inside a coded video sequence
-        else if(nalType == 8)
-        {
           lastPps = buffer;
           continue;
-        }
+        } else if(nalType == 5) {
         // IDR
-        else if(nalType == 5)
-        {
          buffer += lastSps;
          buffer += lastPps;
         }
-      } else {
-        Debug(3, "Not an h264 packet");
+      } else {
+        Debug(3, "Not an h264 packet");
       }
 
       av_init_packet( &packet );
@@ -382,7 +363,7 @@ int RemoteCameraRtsp::Capture( Image &image ) {
   } // end while true
 
   // can never get here.
-  return (0) ;
+  return (0);
 }
 
 //Function to handle capture and store
@@ -392,7 +373,6 @@ int RemoteCameraRtsp::CaptureAndRecord(Image &image, timeval recording, char* ev
 
   uint8_t* directbuffer;
   int frameComplete = false;
-
   while ( true ) {
 
     // WHY Are we clearing it? Might be something good in it.
@@ -450,14 +430,12 @@ int RemoteCameraRtsp::CaptureAndRecord(Image &image, timeval recording, char* ev
         if(nalType == 7) {
           lastSps = buffer;
           continue;
-        }
+        } else if(nalType == 8) {
         // PPS
-        else if(nalType == 8) {
          lastPps = buffer;
          continue;
-        }
+        } else if(nalType == 5) {
         // IDR
-        else if(nalType == 5) {
          buffer += lastSps;
          buffer += lastPps;
         }
@@ -525,14 +503,14 @@ int RemoteCameraRtsp::CaptureAndRecord(Image &image, timeval recording, char* ev
 #if HAVE_LIBSWSCALE
       // Why are we re-scaling after writing out the packet?
       if ( mConvertContext == NULL ) {
-          mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL );
+        mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL );
 
-          if ( mConvertContext == NULL )
-            Fatal( "Unable to create conversion context");
+        if ( mConvertContext == NULL )
+          Fatal( "Unable to create conversion context");
       }
 
       if ( sws_scale( mConvertContext, mRawFrame->data, mRawFrame->linesize, 0, mCodecContext->height, mFrame->data, mFrame->linesize ) < 0 )
-          Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, frameCount );
+        Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, frameCount );
 #else // HAVE_LIBSWSCALE
       Fatal( "You must compile ffmpeg with the --enable-swscale option to use RTSP cameras" );
 #endif // HAVE_LIBSWSCALE
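The Capture() and CaptureAndRecord() hunks above restructure the branches keyed on the H.264 NAL unit type, which the code reads as buffer.head()[3] & 0x1f: the low five bits of the byte that follows a three-byte 00 00 01 start code, with 7 = SPS, 8 = PPS and 5 = IDR. A self-contained sketch of that classification; the sample bytes are made up for illustration.

    // Sketch of the NAL-type check used above; the input buffer is illustrative.
    #include <cstdint>
    #include <cstdio>

    // Classify the NAL unit that follows a 3-byte Annex-B start code (00 00 01).
    void classify_nal(const uint8_t *buf) {
      int nalType = buf[3] & 0x1f; // low 5 bits of the NAL header byte
      switch ( nalType ) {
        case 7:  printf("SPS - cache it, like lastSps in the patch\n"); break;
        case 8:  printf("PPS - cache it, like lastPps in the patch\n"); break;
        case 5:  printf("IDR - prepend the cached SPS/PPS before decoding\n"); break;
        default: printf("other NAL type %d\n", nalType); break;
      }
    }

    int main() {
      const uint8_t sample[] = { 0x00, 0x00, 0x01, 0x67, 0x42 }; // 0x67 & 0x1f == 7
      classify_nal(sample);
      return 0;
    }
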
diff --git a/src/zm_remote_camera_rtsp.h b/src/zm_remote_camera_rtsp.h
index 6080902b6..8ed8b713c 100644
--- a/src/zm_remote_camera_rtsp.h
+++ b/src/zm_remote_camera_rtsp.h
@@ -27,6 +27,7 @@
 #include "zm_rtsp.h"
 #include "zm_ffmpeg.h"
 #include "zm_videostore.h"
+#include "zm_packetqueue.h"
 
 //
 // Class representing 'rtsp' cameras, i.e. those which are
@@ -52,16 +53,16 @@ protected:
   RtspThread *rtspThread;
 
   int frameCount;
-  
+
 #if HAVE_LIBAVFORMAT
-    AVFormatContext *mFormatContext;
-    int mVideoStreamId;
-    int mAudioStreamId;
-    AVCodecContext *mCodecContext;
-    AVCodec *mCodec;
-    AVFrame *mRawFrame;
-    AVFrame *mFrame;
-    _AVPIXELFORMAT imagePixFormat;
+  AVFormatContext *mFormatContext;
+  int mVideoStreamId;
+  int mAudioStreamId;
+  AVCodecContext *mCodecContext;
+  AVCodec *mCodec;
+  AVFrame *mRawFrame;
+  AVFrame *mFrame;
+  _AVPIXELFORMAT imagePixFormat;
 #endif // HAVE_LIBAVFORMAT
   bool wasRecording;
   VideoStore *videoStore;
diff --git a/src/zm_stream.cpp b/src/zm_stream.cpp
index 01b9fa4ed..525d7b8ef 100644
--- a/src/zm_stream.cpp
+++ b/src/zm_stream.cpp
@@ -302,8 +302,7 @@ void StreamBase::openComms()
     {
       Error("Unable to open sock lock file %s: %s", sock_path_lock, strerror(errno) );
       lock_fd = 0;
-    }
-    else if ( flock(lock_fd, LOCK_EX) != 0 )
+    } else if ( flock(lock_fd, LOCK_EX) != 0 )
     {
       Error("Unable to lock sock lock file %s: %s", sock_path_lock, strerror(errno) );
      close(lock_fd);
@@ -318,6 +317,8 @@ void StreamBase::openComms()
 
     if ( sd < 0 ) {
       Fatal( "Can't create socket: %s", strerror(errno) );
+    } else {
+      Debug(3, "Have socket %d", sd );
     }
 
     length = snprintf( loc_sock_path, sizeof(loc_sock_path), "%s/zms-%06ds.sock", config.path_socks, connkey );
@@ -332,6 +333,7 @@ void StreamBase::openComms()
 
     strncpy( loc_addr.sun_path, loc_sock_path, sizeof(loc_addr.sun_path) );
     loc_addr.sun_family = AF_UNIX;
+    Debug(3, "Binding to %s", loc_sock_path );
     if ( bind( sd, (struct sockaddr *)&loc_addr, strlen(loc_addr.sun_path)+sizeof(loc_addr.sun_family)+1 ) < 0 )
     {
       Fatal( "Can't bind: %s", strerror(errno) );
@@ -341,6 +343,7 @@ void StreamBase::openComms()
     strncpy( rem_addr.sun_path, rem_sock_path, sizeof(rem_addr.sun_path) );
     rem_addr.sun_family = AF_UNIX;
   } // end if connKey > 0
+  Debug(3, "comms open" );
 }
 
 void StreamBase::closeComms()
diff --git a/src/zm_videostore.cpp b/src/zm_videostore.cpp
index 82adbc35b..99b521c3c 100644
--- a/src/zm_videostore.cpp
+++ b/src/zm_videostore.cpp
@@ -549,7 +549,7 @@ void VideoStore::dumpPacket( AVPacket *pkt ){
 int VideoStore::writeVideoFramePacket( AVPacket *ipkt ) {
   av_init_packet(&opkt);
 
-  int duration = 0;
+  int duration;
 
   //Scale the PTS of the outgoing packet to be the correct time base
   if (ipkt->pts != AV_NOPTS_VALUE) {
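The zm_videostore.cpp hunk above sits at the top of writeVideoFramePacket(), just before the code that, per the surrounding comment, rescales the incoming packet's PTS into the output time base. A minimal sketch of the kind of conversion FFmpeg's av_rescale_q() performs; the 90 kHz and millisecond time bases are assumptions for illustration, not values taken from VideoStore.

    // Sketch of PTS rescaling with av_rescale_q(); the time bases are illustrative.
    extern "C" {
    #include <libavutil/mathematics.h>
    }
    #include <cstdint>
    #include <cstdio>

    int main() {
      AVRational in_tb  = {1, 90000}; // e.g. an RTP/RTSP 90 kHz video clock
      AVRational out_tb = {1, 1000};  // e.g. a container using milliseconds

      int64_t ipkt_pts = 180000; // two seconds on the input clock
      int64_t opkt_pts = av_rescale_q(ipkt_pts, in_tb, out_tb); // rounds a*bq/cq

      printf("input pts %lld -> output pts %lld\n",
             (long long)ipkt_pts, (long long)opkt_pts);
      return 0;
    }
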