diff --git a/src/zm_monitor.cpp b/src/zm_monitor.cpp
index cfcd3dcb7..37cd48567 100644
--- a/src/zm_monitor.cpp
+++ b/src/zm_monitor.cpp
@@ -353,6 +353,7 @@ Monitor::Monitor(
   event_count = 0;
   image_count = 0;
   analysis_image_count = 0;
+  deinterlacing_value = deinterlacing & 0xff;

   // How many frames we need to have before we start analysing
   ready_count = warmup_count;
@@ -564,7 +565,7 @@ bool Monitor::connect() {
     image_buffer[i].image = new Image( width, height, camera->Colours(), camera->SubpixelOrder(), &(shared_images[i*camera->ImageSize()]) );
     image_buffer[i].image->HoldBuffer(true); /* Don't release the internal buffer or replace it with another */
   }
-  if ( (deinterlacing & 0xff) == 4) {
+  if ( deinterlacing_value == 4 ) {
     /* Four field motion adaptive deinterlacing in use */
     /* Allocate a buffer for the next image */
     next_buffer.image = new Image( width, height, camera->Colours(), camera->SubpixelOrder());
@@ -600,7 +601,7 @@ Monitor::~Monitor() {
     closeEvent();
   }

-  if ( (deinterlacing & 0xff) == 4) {
+  if ( deinterlacing_value == 4 ) {
     delete next_buffer.image;
   }
 #if 1
@@ -2764,7 +2765,6 @@ Monitor *Monitor::Load( unsigned int p_id, bool load_zones, Purpose purpose ) {
 int Monitor::Capture() {
   static int FirstCapture = 1; // Used in de-interlacing to indicate whether this is the even or odd image

-  GetLastEventId();
   unsigned int index = image_count % image_buffer_count;

   if ( (index == shared_data->last_read_index) && (function > MONITOR) ) {
@@ -2776,6 +2776,8 @@ int Monitor::Capture() {
       Warning( "Last image read from shared memory %ld seconds ago, zma may have gone away", last_read_delta )
       shared_data->last_read_index = image_buffer_count;
     }
+  } else {
+    Debug(2,"Current write index %d, last read index %d, current (%d)", shared_data->last_write_index, shared_data->last_read_index, index );
   }

   ZMPacket *packet = &image_buffer[index];
@@ -2784,7 +2786,6 @@ int Monitor::Capture() {
   Image* capture_image = packet->image;
   int captureResult = 0;

-  unsigned int deinterlacing_value = deinterlacing & 0xff;
   if ( deinterlacing_value == 4 ) {
     if ( FirstCapture != 1 ) {
       /* Copy the next image into the shared memory */
@@ -2919,7 +2920,7 @@ int Monitor::Capture() {
       if ( now != last_fps_time ) {
         // # of images per interval / the amount of time it took
         capture_fps = double(fps_report_interval)/(now-last_fps_time);
-        Info( "%d -> %d -> %d", fps_report_interval, now, last_fps_time );
+        //Info( "%d -> %d -> %d", fps_report_interval, now, last_fps_time );
         //Info( "%d -> %d -> %lf -> %lf", now-last_fps_time, fps_report_interval/(now-last_fps_time), double(fps_report_interval)/(now-last_fps_time), fps );
         Info( "%s: %d - Capturing at %.2lf fps", name, image_count, capture_fps );
         last_fps_time = now;
diff --git a/src/zm_monitor.h b/src/zm_monitor.h
index 13a215e48..bdddac4ec 100644
--- a/src/zm_monitor.h
+++ b/src/zm_monitor.h
@@ -232,6 +232,7 @@ protected:
   unsigned int v4l_captures_per_frame;
   Orientation orientation; // Whether the image has to be rotated at all
   unsigned int deinterlacing;
+  unsigned int deinterlacing_value;
   bool videoRecording;

   int savejpegspref;
diff --git a/src/zm_packetqueue.cpp b/src/zm_packetqueue.cpp
index b632b8730..15383a703 100644
--- a/src/zm_packetqueue.cpp
+++ b/src/zm_packetqueue.cpp
@@ -21,12 +21,9 @@
 #include "zm_ffmpeg.h"
 #include

-#define VIDEO_QUEUESIZE 200
-#define AUDIO_QUEUESIZE 50
-
-zm_packetqueue::zm_packetqueue( unsigned int video_image_count, int p_video_stream_id ) {
+zm_packetqueue::zm_packetqueue( int video_image_count, int p_video_stream_id ) {
   video_stream_id = p_video_stream_id;
-  max_video_packet_count = video_image_count;
+  max_video_packet_count = video_image_count-1;
   video_packet_count = 0;
   analysis_it = pktQueue.begin();
 }
@@ -39,7 +36,7 @@ bool zm_packetqueue::queuePacket( ZMPacket* zm_packet ) {
   pktQueue.push_back( zm_packet );

   if ( zm_packet->codec_type == AVMEDIA_TYPE_VIDEO ) {
     video_packet_count += 1;
-    if ( video_packet_count > max_video_packet_count )
+    if ( video_packet_count >= max_video_packet_count )
       clearQueue( max_video_packet_count, video_stream_id );
   }
diff --git a/src/zm_packetqueue.h b/src/zm_packetqueue.h
index 2ef5ba9fa..a3a4a5ee0 100644
--- a/src/zm_packetqueue.h
+++ b/src/zm_packetqueue.h
@@ -37,10 +37,10 @@ class zm_packetqueue {

   int video_stream_id;
   int video_packet_count; // keep track of how many video packets we have, because we shouldn't have more than image_buffer_count
-  unsigned int max_video_packet_count;
+  int max_video_packet_count; // allow a negative value to someday mean unlimited

 public:
-  zm_packetqueue( unsigned int p_max_video_packet_count, int p_video_stream_id );
+  zm_packetqueue( int p_max_video_packet_count, int p_video_stream_id );
   virtual ~zm_packetqueue();
   bool queuePacket( ZMPacket* packet );
   ZMPacket * popPacket( );
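// A minimal sketch of the bounded-queue idea behind the zm_packetqueue changes above,
// assuming a simplified std::list-based queue. The names BoundedVideoQueue and Packet
// are illustrative stand-ins, not the real zm_packetqueue / ZMPacket API: the real
// queuePacket() calls clearQueue() once the count reaches the cap, while here the same
// idea is collapsed into an evict-oldest loop. The cap of video_image_count - 1
// presumably keeps the queue from ever referencing every slot of the shared-memory
// image ring at once, and the signed cap matches the "negative could someday mean
// unlimited" comment in the header change.
#include <list>

struct Packet { /* stand-in for ZMPacket */ };

class BoundedVideoQueue {
  std::list<Packet*> queue;
  int packet_count;
  int max_packet_count;   // signed so a negative value could later mean "unlimited"
public:
  explicit BoundedVideoQueue(int video_image_count)
    : packet_count(0), max_packet_count(video_image_count - 1) {}

  ~BoundedVideoQueue() {
    while ( !queue.empty() ) { delete queue.front(); queue.pop_front(); }
  }

  void push(Packet *p) {
    queue.push_back(p);
    packet_count += 1;
    // Evict oldest-first so the queue never holds more than max_packet_count packets.
    while ( max_packet_count >= 0 && packet_count > max_packet_count ) {
      delete queue.front();
      queue.pop_front();
      packet_count -= 1;
    }
  }
};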
diff --git a/src/zm_videostore.cpp b/src/zm_videostore.cpp
index 86b6566a2..4685bc3d8 100644
--- a/src/zm_videostore.cpp
+++ b/src/zm_videostore.cpp
@@ -141,11 +141,13 @@ bool VideoStore::open() {
   }

   if ( monitor->OutputCodec() == "mjpeg" ) {
+Debug(2,"Using mjpeg");
     video_out_codec = avcodec_find_encoder_by_name("mjpeg");
     if ( ! video_out_codec ) {
-      Debug(1, "Didn't find omx");
+      Debug(1, "Didn't find mjpeg encoder");
       video_out_codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
     }
+
     video_out_ctx = avcodec_alloc_context3( video_out_codec );
     video_out_ctx->codec_id = video_out_codec->id;
     video_out_ctx->pix_fmt = AV_PIX_FMT_YUVJ422P;
@@ -919,10 +921,10 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {

   // if we have to transcode
   if ( video_out_ctx->codec_id != video_in_ctx->codec_id ) {
-    Debug(3, "Have encoding video frame count (%d)", frame_count);
+    //Debug(3, "Have encoding video frame count (%d)", frame_count);

     if ( ! zm_packet->out_frame ) {
-      Debug(3, "Have no out frame");
+      //Debug(3, "Have no out frame");
       AVFrame *out_frame = zm_packet->out_frame = zm_av_frame_alloc();
       if ( ! out_frame ) {
         Error("Unable to allocate a frame");
@@ -961,13 +963,11 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
       out_frame->height = video_out_ctx->height;
       out_frame->format = video_out_ctx->pix_fmt;
       //out_frame->pkt_duration = 0;
-      out_frame->coded_picture_number = frame_count;
-      out_frame->display_picture_number = frame_count;

       if ( ! zm_packet->in_frame ) {
-        Debug(2,"Have no in_frame");
+        //Debug(2,"Have no in_frame");
         if ( zm_packet->packet.size ) {
-          Debug(2,"Decoding");
+          //Debug(2,"Decoding");
           if ( ! zm_packet->decode( video_in_ctx ) ) {
             Debug(2, "unable to decode yet.");
             return 0;
@@ -975,7 +975,7 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
           //Go straight to out frame
           swscale.Convert( zm_packet->in_frame, out_frame );
         } else if ( zm_packet->image ) {
-          Debug(2,"Have an image, convert it");
+          //Debug(2,"Have an image, convert it");
           //Go straight to out frame
           swscale.Convert(
               zm_packet->image,
@@ -997,26 +997,31 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
       } // end if no in_frame
     } // end if no out_frame

+    zm_packet->out_frame->coded_picture_number = frame_count;
+    zm_packet->out_frame->display_picture_number = frame_count;
+    zm_packet->out_frame->sample_aspect_ratio = (AVRational){ 0, 1 };
+
     if ( ! video_last_pts ) {
       video_last_pts = zm_packet->timestamp->tv_sec*1000000 + zm_packet->timestamp->tv_usec;
       Debug(2, "No video_lsat_pts, set to (%" PRId64 ") secs(%d) usecs(%d)", video_last_pts, zm_packet->timestamp->tv_sec, zm_packet->timestamp->tv_usec );
       zm_packet->out_frame->pts = 0;
     } else {
+      //uint64_t seconds = zm_packet->timestamp->tv_sec*1000000;
       zm_packet->out_frame->pts = ( zm_packet->timestamp->tv_sec*1000000 + zm_packet->timestamp->tv_usec ) - video_last_pts;
-      Debug(2, " Setting pts, set to (%" PRId64 ") from (%" PRIu64 " - secs(%d) usecs(%d)",
-          zm_packet->out_frame->pts, video_last_pts, zm_packet->timestamp->tv_sec, zm_packet->timestamp->tv_usec );
+      Debug(2, " Setting pts for frame(%d), set to (%" PRId64 ") from (%" PRId64 " - secs(%d) usecs(%d)",
+          frame_count, zm_packet->out_frame->pts, video_last_pts, zm_packet->timestamp->tv_sec, zm_packet->timestamp->tv_usec );
     }

     if ( zm_packet->keyframe ) {
-      Debug(2, "Setting keyframe was (%d)", zm_packet->out_frame->key_frame );
+      //Debug(2, "Setting keyframe was (%d)", zm_packet->out_frame->key_frame );
       zm_packet->out_frame->key_frame = 1;
-      Debug(2, "Setting keyframe (%d)", zm_packet->out_frame->key_frame );
+      //Debug(2, "Setting keyframe (%d)", zm_packet->out_frame->key_frame );
     } else {
       Debug(2, "Not Setting keyframe");
     }

-    // Do this to allow the encoder to choose whether to use I/P/B frame
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
+    // Do this to allow the encoder to choose whether to use I/P/B frame
     zm_packet->out_frame->pict_type = AV_PICTURE_TYPE_NONE;
     if ( (ret = avcodec_send_frame(video_out_ctx, zm_packet->out_frame)) < 0 ) {
       Error("Could not send frame (error '%s')", av_make_error_string(ret).c_str());
@@ -1024,6 +1029,8 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
     }

     av_init_packet(&opkt);
+    opkt.data = NULL;
+    opkt.size = 0;
     if ( (ret = avcodec_receive_packet(video_out_ctx, &opkt)) < 0 ) {
       zm_av_packet_unref(&opkt);
       if ( AVERROR(EAGAIN) == ret ) {
@@ -1037,6 +1044,7 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
       }
       return -1;
     }
+//Debug(2, "Got packet using receive_packet, dts:%" PRId64 ", pts:%" PRId64 ", keyframe:%d", opkt.dts, opkt.pts, opkt.flags & AV_PKT_FLAG_KEY );
 #else
     av_init_packet(&opkt);
     int data_present;
@@ -1053,8 +1061,8 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
       return 0;
     }
 #endif
-    opkt.dts = opkt.pts;
-    opkt.duration = 0;
+    //opkt.dts = opkt.pts;
+    //opkt.duration = 0;

   } else {
     AVPacket *ipkt = &zm_packet->packet;
@@ -1073,7 +1081,7 @@ int VideoStore::writeVideoFramePacket( ZMPacket * zm_packet ) {
     }
     opkt.duration = 0;

-    Debug(3, "dts:%" PRId64 ", pts:%" PRId64 ", keyframe:%d", opkt.dts, opkt.pts, opkt.flags & AV_PKT_FLAG_KEY );
+    Debug(3, "dts:%" PRId64 ", pts:%" PRId64 ", duration:%" PRId64 ", keyframe:%d", opkt.dts, opkt.pts, opkt.duration, opkt.flags & AV_PKT_FLAG_KEY );
   write_video_packet( opkt );
   zm_av_packet_unref(&opkt);
@@ -1090,7 +1098,7 @@ void VideoStore::write_video_packet( AVPacket &opkt ) {
     opkt.dts = opkt.pts;
   }

-  opkt.pos = -1;
+  //opkt.pos = -1;
   opkt.stream_index = video_out_stream->index;

   //video_next_dts += opkt.duration;
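// A hedged sketch of the send/receive encode step the writeVideoFramePacket() hunks
// above rely on (FFmpeg >= 3.1 API); the helper name encode_frame() is illustrative,
// not a ZoneMinder or FFmpeg function. av_init_packet() does not touch pkt.data or
// pkt.size, so they are cleared explicitly -- the same reason the patch adds
// opkt.data = NULL / opkt.size = 0 before avcodec_receive_packet() -- letting the
// encoder allocate the output buffer itself.
extern "C" {
#include <libavcodec/avcodec.h>
}

// Returns 1 when a packet was produced, 0 when the encoder needs more input,
// and a negative AVERROR code on failure.
static int encode_frame(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt) {
  int ret = avcodec_send_frame(enc_ctx, frame);   // frame may be NULL to flush
  if ( ret < 0 )
    return ret;

  av_init_packet(pkt);
  pkt->data = NULL;                               // let the encoder allocate
  pkt->size = 0;

  ret = avcodec_receive_packet(enc_ctx, pkt);
  if ( ret == AVERROR(EAGAIN) || ret == AVERROR_EOF )
    return 0;                                     // nothing ready yet
  if ( ret < 0 )
    return ret;                                   // real error
  return 1;                                       // pkt now holds encoded data
}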
diff --git a/src/zmc.cpp b/src/zmc.cpp
index ed7ae5c91..5110827bf 100644
--- a/src/zmc.cpp
+++ b/src/zmc.cpp
@@ -254,6 +254,7 @@ int main(int argc, char *argv[]) {
     last_capture_times[i].tv_sec = last_capture_times[i].tv_usec = 0;
     capture_delays[i] = monitors[i]->GetCaptureDelay();
     alarm_capture_delays[i] = monitors[i]->GetAlarmCaptureDelay();
+    Debug(2, "capture delay(%d) alarm delay(%d)", capture_delays[i], alarm_capture_delays[i] );

   Monitor::Function function = monitors[0]->GetFunction();
   if ( function == Monitor::MODECT || function == Monitor::MOCORD || function == Monitor::RECORD) {
@@ -278,10 +279,11 @@ int main(int argc, char *argv[]) {
       for ( int j = 0; j < n_monitors; j++ ) {
         if ( last_capture_times[j].tv_sec ) {
           DELTA_TIMEVAL(delta_time, now, last_capture_times[j], DT_PREC_3);
+          // capture_delay is the amount of time we should sleep to achieve the desired framerate.
           if ( monitors[i]->GetState() == Monitor::ALARM )
-            next_delays[j] = alarm_capture_delays[j]-delta_time.delta;
+            next_delays[j] = alarm_capture_delays[j] - delta_time.delta;
           else
-            next_delays[j] = capture_delays[j]-delta_time.delta;
+            next_delays[j] = capture_delays[j] - delta_time.delta;
           if ( next_delays[j] < 0 )
             next_delays[j] = 0;
         } else {
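// A small sketch of the pacing arithmetic behind the next_delays computation above.
// target_fps, elapsed_usec and the helper names are illustrative; the real zmc loop
// uses DELTA_TIMEVAL with its own precision handling (DT_PREC_3) rather than raw
// microseconds, and switches to the alarm delay when the monitor is in alarm.
#include <algorithm>
#include <cstdint>

// capture_delay: time budget per frame needed to hit the target frame rate.
static int64_t capture_delay_usec(double target_fps) {
  return target_fps > 0.0 ? static_cast<int64_t>(1000000.0 / target_fps) : 0;
}

// Sleep only for whatever part of the per-frame budget has not already been
// spent capturing/processing; never a negative sleep.
static int64_t next_delay_usec(int64_t capture_delay, int64_t elapsed_usec) {
  return std::max<int64_t>(0, capture_delay - elapsed_usec);
}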