Isaac Connor 2017-11-12 11:50:07 -05:00
parent 3dafb5c225
commit 50d3b168fa
4 changed files with 62 additions and 31 deletions

View File

@@ -29,10 +29,13 @@ int AnalysisThread::run() {
     if ( analysis_update_delay ) {
       cur_time = time( 0 );
       if ( (unsigned int)( cur_time - last_analysis_update_time ) > analysis_update_delay ) {
+        Debug(4, "Updating " );
         analysis_rate = monitor->GetAnalysisRate();
         monitor->UpdateAdaptiveSkip();
         last_analysis_update_time = cur_time;
       }
+    } else {
+      Debug(4, "Not Updating " );
     }
     if ( !monitor->Analyse() ) {
@@ -44,6 +47,6 @@ Debug(4, "Sleeping for %d", analysis_rate);
     }
     //sigprocmask(SIG_UNBLOCK, &block_set, 0);
-  }
+  } // end while ! terminate
   return 0;
 } // end in AnalysisThread::run()
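The hunk above only adds Debug lines around an existing pattern: the analysis thread re-reads its rate and adaptive-skip settings at most once every analysis_update_delay seconds. A minimal sketch of that throttle, using the member names shown in the diff (the enclosing while loop, member initialisation, and the later use of analysis_rate for the per-frame sleep are assumed from context):

    time_t cur_time = time( 0 );
    if ( analysis_update_delay ) {
      // Only refresh the monitor's analysis settings once per update interval.
      if ( (unsigned int)( cur_time - last_analysis_update_time ) > analysis_update_delay ) {
        analysis_rate = monitor->GetAnalysisRate();   // value later used for the per-frame sleep
        monitor->UpdateAdaptiveSkip();                // recompute frame skipping
        last_analysis_update_time = cur_time;
      }
    }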

View File

@@ -1898,14 +1898,14 @@ int LocalCamera::PrimeCapture() {
 }
 int LocalCamera::PreCapture() {
-  Debug( 2, "Pre-capturing" );
+  Debug( 4, "Pre-capturing" );
   return( 0 );
 }
 int LocalCamera::Capture( ZMPacket &zm_packet ) {
   // We assume that the avpacket is allocated, and just needs to be filled
-  Debug( 3, "Capturing" );
+  Debug( 2, "Capturing" );
   static uint8_t* buffer = NULL;
   static uint8_t* directbuffer = NULL;
   static int capture_frame = -1;
@@ -2000,7 +2000,7 @@ int LocalCamera::Capture( ZMPacket &zm_packet ) {
 #if HAVE_LIBSWSCALE
     if ( conversion_type == 1 ) {
-      Debug( 9, "Calling sws_scale to perform the conversion" );
+      Debug( 9, "Setting up a frame" );
       /* Use swscale to convert the image directly into the shared memory */
 #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0)
       av_image_fill_arrays(tmpPicture->data,
@@ -2010,6 +2010,7 @@ int LocalCamera::Capture( ZMPacket &zm_packet ) {
       avpicture_fill( (AVPicture *)tmpPicture, directbuffer,
                       imagePixFormat, width, height );
 #endif
+      Debug( 9, "Calling sws_scale to perform the conversion" );
       sws_scale(
           imgConversionContext,
           capturePictures[capture_frame]->data,
@@ -2019,6 +2020,7 @@ int LocalCamera::Capture( ZMPacket &zm_packet ) {
           tmpPicture->data,
           tmpPicture->linesize
           );
+      Debug( 9, "Done sws_scale to perform the conversion" );
     } else
 #endif
     if ( conversion_type == 2 ) {
@@ -2071,7 +2073,7 @@ int LocalCamera::PostCapture() {
     } else {
       Error( "Unable to requeue buffer due to not v4l2_data" )
     }
-  }
+  } else
 #endif // ZM_HAS_V4L2
 #if ZM_HAS_V4L1
   if ( v4l_version == 1 ) {
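For context, the conversion path that the new Debug lines bracket is the usual libswscale pattern: point a frame at the destination buffer with av_image_fill_arrays(), then sws_scale() the captured frame into it. A condensed sketch under the names used in the diff (imgConversionContext, directbuffer, capturePictures, capture_frame and imagePixFormat are assumed to be set up elsewhere in LocalCamera; only the newer libavutil path is shown):

    #include <libavutil/frame.h>
    #include <libavutil/imgutils.h>
    #include <libswscale/swscale.h>

    AVFrame *tmpPicture = av_frame_alloc();
    // Make tmpPicture's data/linesize pointers describe the shared-memory buffer.
    av_image_fill_arrays(tmpPicture->data, tmpPicture->linesize,
                         directbuffer, imagePixFormat, width, height, 1);
    // Convert the most recently captured frame directly into that buffer.
    sws_scale(imgConversionContext,
              capturePictures[capture_frame]->data,
              capturePictures[capture_frame]->linesize,
              0, height,
              tmpPicture->data, tmpPicture->linesize);
    av_frame_free(&tmpPicture);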

View File

@@ -1149,6 +1149,7 @@ bool Monitor::Analyse() {
     //Debug(3, " shared_data->last_read_index == shared_data->last_write_index " );
     return false;
   }
+  Debug(3, "ANal");
   struct timeval now;
   gettimeofday( &now, NULL );
@@ -2870,6 +2871,7 @@ int Monitor::Capture() {
       }
     } else {
       captureResult = camera->Capture(packet);
+      Debug(2,"Capture result (%d)", captureResult );
       if ( captureResult < 0 ) {
         // Unable to capture image for temporary reason
         // Fake a signal loss image
@@ -2882,13 +2884,15 @@
     }
     int video_stream_id = camera->get_VideoStreamId();
+    Debug(2,"Video stream is (%d)", video_stream_id );
     //Video recording
     if ( video_store_data->recording.tv_sec ) {
-      if ( shared_data->last_event_id != this->GetVideoWriterEventId() ) {
+      Debug(2,"Recording since (%d)", video_store_data->recording.tv_sec );
+      if ( shared_data->last_event_id != video_store_data->current_event ) {
         Debug(2, "Have change of event. last_event(%d), our current (%d)",
             shared_data->last_event_id,
-            this->GetVideoWriterEventId()
+            video_store_data->current_event
             );
         if ( videoStore ) {
 Debug(2, "Have videostore already?");
@@ -2903,6 +2907,8 @@ Debug(2, "Have videostore already?");
           videoStore = NULL;
           this->SetVideoWriterEventId( 0 );
         } // end if videoStore
+      } else {
+        Debug(2, "No change of event");
       } // end if end of recording
       if ( shared_data->last_event_id and ! videoStore ) {
@@ -2933,27 +2939,34 @@ Debug(2,"New videostore");
           delete videoStore;
           videoStore = NULL;
           this->SetVideoWriterEventId( 0 );
+        } else {
+          Debug(2,"Not recording");
         }
       // Buffer video packets, since we are not recording.
       // All audio packets are keyframes, so only if it's a video keyframe
-      if ( ( packet.packet.stream_index == video_stream_id ) && ( packet.keyframe ) ) {
+      if ( packet.packet.stream_index == video_stream_id ) {
+        if ( packet.keyframe ) {
           packetqueue.clearQueue( this->GetPreEventCount(), video_stream_id );
+          packetqueue.queuePacket( &packet );
+        } else if ( packetqueue.size() ) {
+          // it's a keyframe or we already have something in the queue
+          packetqueue.queuePacket( &packet );
         }
+      } else if ( packet.packet.stream_index == camera->get_AudioStreamId() ) {
       // The following lines should ensure that the queue always begins with a video keyframe
-      if ( packet.packet.stream_index == camera->get_AudioStreamId() ) {
         //Debug(2, "Have audio packet, reocrd_audio is (%d) and packetqueue.size is (%d)", record_audio, packetqueue.size() );
         if ( record_audio && packetqueue.size() ) {
           // if it's audio, and we are doing audio, and there is already something in the queue
           packetqueue.queuePacket( &packet );
         }
-      } else if ( packet.packet.stream_index == video_stream_id ) {
-        if ( packet.keyframe || packetqueue.size() ) // it's a keyframe or we already have something in the queue
-          packetqueue.queuePacket( &packet );
+      } else {
+        Debug(2,"Unknown stream");
       } // end if audio or video
     } // end if recording or not
     if ( videoStore ) {
+      Debug(2, "Writing packet");
       //Write the packet to our video store, it will be smart enough to know what to do
       int ret = videoStore->writePacket( &packet );
       if ( ret < 0 ) { //Less than zero and we skipped a frame
@@ -2962,6 +2975,8 @@ Debug(2,"New videostore");
     }
     } // end if de-interlacing or not
+    if ( deinterlacing_value ) {
+      Debug(2,"Deinterlace");
     /* Deinterlacing */
     if ( deinterlacing_value == 1 ) {
       capture_image->Deinterlace_Discard();
@@ -2974,8 +2989,10 @@ Debug(2,"New videostore");
     } else if ( deinterlacing_value == 5 ) {
       capture_image->Deinterlace_Blend_CustomRatio( (deinterlacing>>8)&0xff );
     }
+    }
     if ( orientation != ROTATE_0 ) {
+      Debug(2,"Rotate");
       switch ( orientation ) {
         case ROTATE_0 : {
           // No action required
@@ -3006,13 +3023,17 @@ Debug(2,"New videostore");
         }
       }
-    if ( privacy_bitmask )
+    if ( privacy_bitmask ) {
+      Debug(2,"privacy");
       capture_image->MaskPrivacy( privacy_bitmask );
+    }
     gettimeofday( image_buffer[index].timestamp, NULL );
     if ( config.timestamp_on_capture ) {
+      Debug(2,"Timestamping");
       TimestampImage( capture_image, image_buffer[index].timestamp );
     }
+    Debug(2,"Check signal");
     shared_data->signal = CheckSignal(capture_image);
     shared_data->last_write_index = index;
     shared_data->last_write_time = image_buffer[index].timestamp->tv_sec;
@@ -3039,7 +3060,7 @@ Debug(2,"New videostore");
         Error( "Can't run query: %s", mysql_error( &dbconn ) );
       }
     }
-  }
+  } // end if report fps
   // Icon: I'm not sure these should be here. They have nothing to do with capturing
   if ( shared_data->action & GET_SETTINGS ) {
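The most substantial change in this file is the restructured packet buffering. Read as a whole, the new branching is roughly the following (a condensed sketch; the PacketQueue and ZMPacket member names are taken from the diff, and the surrounding Monitor::Capture() state is assumed):

    // When not recording, keep a rolling pre-event buffer in packetqueue.
    if ( packet.packet.stream_index == video_stream_id ) {
      if ( packet.keyframe ) {
        // New video keyframe: trim the queue back to the configured pre-event
        // count so it always starts on a keyframe, then queue this packet.
        packetqueue.clearQueue( this->GetPreEventCount(), video_stream_id );
        packetqueue.queuePacket( &packet );
      } else if ( packetqueue.size() ) {
        // Non-keyframe video is only useful once a keyframe is already queued.
        packetqueue.queuePacket( &packet );
      }
    } else if ( packet.packet.stream_index == camera->get_AudioStreamId() ) {
      // Audio is only kept when audio recording is on and video is already
      // queued, so the queue still begins with a video keyframe.
      if ( record_audio && packetqueue.size() )
        packetqueue.queuePacket( &packet );
    } else {
      Debug(2, "Unknown stream");
    }

Compared with the old version, all queueing of video packets now lives in the video branch with the keyframe test inside it, and packets from any other stream are logged as unknown rather than silently ignored.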

View File

@@ -269,7 +269,8 @@ int main(int argc, char *argv[]) {
   struct timeval now;
   struct DeltaTimeval delta_time;
   while ( !zm_terminate ) {
-    sigprocmask(SIG_BLOCK, &block_set, 0);
+    Debug(2,"blocking");
+    //sigprocmask(SIG_BLOCK, &block_set, 0);
     for ( int i = 0; i < n_monitors; i++ ) {
       long min_delay = MAXINT;
@@ -316,14 +317,18 @@ int main(int argc, char *argv[]) {
           DELTA_TIMEVAL(delta_time, now, last_capture_times[i], DT_PREC_3);
           long sleep_time = next_delays[i]-delta_time.delta;
           if ( sleep_time > 0 ) {
+            Debug(2,"usleeping (%d)", sleep_time*(DT_MAXGRAN/DT_PREC_3) );
             usleep(sleep_time*(DT_MAXGRAN/DT_PREC_3));
           }
-        }
+          last_capture_times[i] = now;
+        } else {
           gettimeofday(&(last_capture_times[i]), NULL);
+        }
       } // end if next_delay <= min_delay || next_delays[i] <= 0 )
     } // end foreach n_monitors
-    sigprocmask(SIG_UNBLOCK, &block_set, 0);
+    Debug(2,"unblocking");
+    //sigprocmask(SIG_UNBLOCK, &block_set, 0);
   } // end while ! zm_terminate
   for ( int i = 0; i < n_monitors; i++ ) {
     if ( analysis_threads[i] ) {
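The reworked sleep logic above paces each monitor's capture loop: after a capture, zmc sleeps out whatever remains of that monitor's delay and then keeps the pre-sleep timestamp as the new reference. A rough equivalent using plain timeval arithmetic instead of the DELTA_TIMEVAL/DT_PREC_3 macros (next_delay_us and last_capture_time are illustrative stand-ins for the per-monitor arrays in the diff):

    #include <sys/time.h>
    #include <unistd.h>

    struct timeval now;
    gettimeofday( &now, NULL );
    // How long has it been since this monitor last captured?
    long elapsed_us = (now.tv_sec  - last_capture_time.tv_sec) * 1000000L
                    + (now.tv_usec - last_capture_time.tv_usec);
    long sleep_us = next_delay_us - elapsed_us;
    if ( sleep_us > 0 )
      usleep( sleep_us );       // sleep out the remainder of the capture interval
    last_capture_time = now;    // pace from the pre-sleep timestamp, as in the new code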