Switch ZMPacket * to a shared_ptr<ZMPacket>, so that in ZMLockedPacket we can unlock and then notify and be confident that packet_ won't have been deleted. Change ZMPacket->timestamp to be a timeval instead of a timeval *; this might not have been necessary, but I like it. No longer use the ZMPacket object to wrap the shared image buffers and timestamps; use a vector of Image * for image_buffer instead.

Isaac Connor 2021-05-08 21:14:20 -04:00
parent 9ee24170a9
commit 2cf6ad8089
30 changed files with 253 additions and 487 deletions
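
The ownership change is easiest to see in isolation. Below is a minimal, self-contained sketch — not the actual ZoneMinder classes; Packet and LockedPacket are simplified stand-ins for ZMPacket and ZMLockedPacket — showing why holding a shared_ptr inside the lock wrapper makes unlock-then-notify safe even after the packetqueue has dropped its own reference, and showing the timestamp now held by value:

// Minimal sketch, not the actual ZoneMinder classes: Packet and LockedPacket
// stand in for ZMPacket and ZMLockedPacket.
#include <condition_variable>
#include <list>
#include <memory>
#include <mutex>
#include <sys/time.h>

struct Packet {
  std::mutex mutex_;
  std::condition_variable condition_;  // shared between all locks on this packet
  timeval timestamp{};                 // held by value now, not a timeval *
};

class LockedPacket {
 public:
  explicit LockedPacket(std::shared_ptr<Packet> p)
    : packet_(std::move(p)), lck_(packet_->mutex_, std::defer_lock) {}
  void lock() { lck_.lock(); }
  void unlock() {
    lck_.unlock();
    // packet_ keeps the Packet alive even if the queue already dropped its
    // reference, so notifying after unlocking cannot touch freed memory.
    packet_->condition_.notify_all();
  }
  std::shared_ptr<Packet> packet_;     // shared ownership instead of a raw pointer
 private:
  std::unique_lock<std::mutex> lck_;
};

int main() {
  std::list<std::shared_ptr<Packet>> pktQueue;
  auto pkt = std::make_shared<Packet>();
  gettimeofday(&pkt->timestamp, nullptr);   // fill the by-value timestamp
  pktQueue.push_back(pkt);
  pkt.reset();                              // only the queue owns it now

  LockedPacket lp(pktQueue.front());        // lock wrapper takes a second reference
  lp.lock();
  pktQueue.pop_front();                     // queue lets go; lp still owns the packet
  lp.unlock();                              // unlock + notify, no use-after-free
  return 0;
}

This is also why the explicit delete calls in PacketQueue become no-ops in the diff below: dropping the shared_ptr from pktQueue is enough, and anything still holding a ZMLockedPacket keeps the packet alive until it is done with it.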

@ -1 +1 @@
Subproject commit cd7fd49becad6010a1b8466bfebbd93999a39878
Subproject commit d714323e693ba106be6af363295d950f50ca15e0

View File

@ -24,6 +24,8 @@
#include <sys/ioctl.h>
#include <sys/types.h>
#include <memory>
class Monitor;
class ZMPacket;
@ -133,7 +135,7 @@ public:
virtual int PrimeCapture() { return 0; }
virtual int PreCapture() = 0;
virtual int Capture(ZMPacket &p) = 0;
virtual int Capture(std::shared_ptr<ZMPacket> &p) = 0;
virtual int PostCapture() = 0;
virtual int Close() = 0;
};

View File

@ -285,7 +285,7 @@ void Event::createNotes(std::string &notes) {
bool Event::WriteFrameImage(
Image *image,
struct timeval timestamp,
timeval timestamp,
const char *event_file,
bool alarm_frame) const {
@ -299,7 +299,7 @@ bool Event::WriteFrameImage(
// stash the image we plan to use in another pointer regardless if timestamped.
// exif is only timestamp at present this switches on or off for write
Image *ts_image = new Image(*image);
monitor->TimestampImage(ts_image, &timestamp);
monitor->TimestampImage(ts_image, timestamp);
rc = ts_image->WriteJpeg(event_file, thisquality,
(monitor->Exif() ? timestamp : (timeval){0,0}));
delete(ts_image);
@ -311,9 +311,8 @@ bool Event::WriteFrameImage(
return rc;
} // end Event::WriteFrameImage( Image *image, struct timeval timestamp, const char *event_file, bool alarm_frame )
bool Event::WritePacket(ZMPacket &packet) {
if ( videoStore->writePacket(&packet) < 0 )
bool Event::WritePacket(const std::shared_ptr<ZMPacket>&packet) {
if (videoStore->writePacket(packet) < 0)
return false;
return true;
} // bool Event::WriteFrameVideo
@ -420,7 +419,7 @@ void Event::updateNotes(const StringSetMap &newNoteSetMap) {
} // end if update
} // void Event::updateNotes(const StringSetMap &newNoteSetMap)
void Event::AddPacket(ZMPacket *packet) {
void Event::AddPacket(const std::shared_ptr<ZMPacket>&packet) {
have_video_keyframe = have_video_keyframe ||
( ( packet->codec_type == AVMEDIA_TYPE_VIDEO ) &&
@ -437,8 +436,8 @@ void Event::AddPacket(ZMPacket *packet) {
//FIXME if it fails, we should write a jpeg
}
if ((packet->codec_type == AVMEDIA_TYPE_VIDEO) or packet->image)
AddFrame(packet->image, *(packet->timestamp), packet->zone_stats, packet->score, packet->analysis_image);
end_time = *packet->timestamp;
AddFrame(packet->image, packet->timestamp, packet->zone_stats, packet->score, packet->analysis_image);
end_time = packet->timestamp;
return;
}

View File

@ -111,8 +111,8 @@ class Event {
const struct timeval &StartTime() const { return start_time; }
const struct timeval &EndTime() const { return end_time; }
void AddPacket(ZMPacket *p);
bool WritePacket(ZMPacket &p);
void AddPacket(const std::shared_ptr<ZMPacket> &p);
bool WritePacket(const std::shared_ptr<ZMPacket> &p);
bool SendFrameImage(const Image *image, bool alarm_frame=false);
bool WriteFrameImage(
Image *image,

View File

@ -189,7 +189,7 @@ int FfmpegCamera::PreCapture() {
return 0;
}
int FfmpegCamera::Capture(ZMPacket &zm_packet) {
int FfmpegCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
if (!mCanCapture) return -1;
start_read_time = time(nullptr);
@ -236,14 +236,14 @@ int FfmpegCamera::Capture(ZMPacket &zm_packet) {
ZM_DUMP_STREAM_PACKET(stream, packet, "ffmpeg_camera in");
#if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
zm_packet.codec_type = stream->codecpar->codec_type;
zm_packet->codec_type = stream->codecpar->codec_type;
#else
zm_packet.codec_type = stream->codec->codec_type;
zm_packet->codec_type = stream->codec->codec_type;
#endif
bytes += packet.size;
zm_packet.set_packet(&packet);
zm_packet.stream = stream;
zm_packet.pts = av_rescale_q(packet.pts, stream->time_base, AV_TIME_BASE_Q);
zm_packet->set_packet(&packet);
zm_packet->stream = stream;
zm_packet->pts = av_rescale_q(packet.pts, stream->time_base, AV_TIME_BASE_Q);
if ( packet.pts != AV_NOPTS_VALUE ) {
if ( stream == mVideoStream ) {
if (mFirstVideoPTS == AV_NOPTS_VALUE)

View File

@ -22,6 +22,8 @@
#include "zm_camera.h"
#include <memory>
#if HAVE_LIBAVUTIL_HWCONTEXT_H
typedef struct DecodeContext {
AVBufferRef *hw_device_ref;
@ -93,7 +95,7 @@ class FfmpegCamera : public Camera {
int PrimeCapture() override;
int PreCapture() override;
int Capture(ZMPacket &p) override;
int Capture(std::shared_ptr<ZMPacket> &p) override;
int PostCapture() override;
private:
static int FfmpegInterruptCallback(void*ctx);

View File

@ -87,8 +87,8 @@ int FileCamera::PreCapture() {
return 0;
}
int FileCamera::Capture( ZMPacket &zm_packet ) {
return zm_packet.image->ReadJpeg(path, colours, subpixelorder) ? 1 : -1;
int FileCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
return zm_packet->image->ReadJpeg(path, colours, subpixelorder) ? 1 : -1;
}
int FileCamera::PostCapture() {

View File

@ -51,7 +51,7 @@ public:
void Initialise();
void Terminate();
int PreCapture() override;
int Capture(ZMPacket &p) override;
int Capture(std::shared_ptr<ZMPacket> &p) override;
int PostCapture() override;
int Close() override { return 0; };
};

View File

@ -2036,16 +2036,16 @@ int LocalCamera::PreCapture() {
return 1;
}
int LocalCamera::Capture(ZMPacket &zm_packet) {
int LocalCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
// We assume that the avpacket is allocated, and just needs to be filled
static uint8_t* buffer = nullptr;
int buffer_bytesused = 0;
int capture_frame = -1;
int captures_per_frame = 1;
if ( channel_count > 1 )
if (channel_count > 1)
captures_per_frame = v4l_captures_per_frame;
if ( captures_per_frame <= 0 ) {
if (captures_per_frame <= 0) {
captures_per_frame = 1;
Warning("Invalid Captures Per Frame setting: %d", captures_per_frame);
}
@ -2191,22 +2191,22 @@ int LocalCamera::Capture(ZMPacket &zm_packet) {
} /* prime capture */
if (!zm_packet.image) {
if (!zm_packet->image) {
Debug(4, "Allocating image");
zm_packet.image = new Image(width, height, colours, subpixelorder);
zm_packet->image = new Image(width, height, colours, subpixelorder);
}
if ( conversion_type != 0 ) {
if (conversion_type != 0) {
Debug(3, "Performing format conversion %d", conversion_type);
/* Request a writeable buffer of the target image */
uint8_t *directbuffer = zm_packet.image->WriteBuffer(width, height, colours, subpixelorder);
if ( directbuffer == nullptr ) {
uint8_t *directbuffer = zm_packet->image->WriteBuffer(width, height, colours, subpixelorder);
if (directbuffer == nullptr) {
Error("Failed requesting writeable buffer for the captured image.");
return -1;
}
#if HAVE_LIBSWSCALE
if ( conversion_type == 1 ) {
if (conversion_type == 1) {
Debug(9, "Calling sws_scale to perform the conversion");
/* Use swscale to convert the image directly into the shared memory */
#if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0)
@ -2236,20 +2236,20 @@ int LocalCamera::Capture(ZMPacket &zm_packet) {
// Need to store the jpeg data too
Debug(9, "Decoding the JPEG image");
/* JPEG decoding */
zm_packet.image->DecodeJpeg(buffer, buffer_bytesused, colours, subpixelorder);
zm_packet->image->DecodeJpeg(buffer, buffer_bytesused, colours, subpixelorder);
}
} else {
Debug(3, "No format conversion performed. Assigning the image");
/* No conversion was performed, the image is in the V4L buffers and needs to be copied into the shared memory */
zm_packet.image->Assign(width, height, colours, subpixelorder, buffer, imagesize);
zm_packet->image->Assign(width, height, colours, subpixelorder, buffer, imagesize);
} // end if doing conversion or not
zm_packet.packet.stream_index = mVideoStreamId;
zm_packet.stream = mVideoStream;
zm_packet.codec_type = AVMEDIA_TYPE_VIDEO;
zm_packet.keyframe = 1;
zm_packet->packet.stream_index = mVideoStreamId;
zm_packet->stream = mVideoStream;
zm_packet->codec_type = AVMEDIA_TYPE_VIDEO;
zm_packet->keyframe = 1;
return 1;
} // end int LocalCamera::Capture()

View File

@ -151,7 +151,7 @@ public:
int PrimeCapture() override;
int PreCapture() override;
int Capture(ZMPacket &p) override;
int Capture(std::shared_ptr<ZMPacket> &p) override;
int PostCapture() override;
int Close() override { return 0; };

View File

@ -996,12 +996,10 @@ bool Monitor::connect() {
if (!camera) LoadCamera();
Debug(3, "Allocating %d image buffers", image_buffer_count);
image_buffer = new ZMPacket[image_buffer_count];
image_buffer.reserve(image_buffer_count);
for (int32_t i = 0; i < image_buffer_count; i++) {
image_buffer[i].image_index = i;
image_buffer[i].timestamp = &(shared_timestamps[i]);
image_buffer[i].image = new Image(width, height, camera->Colours(), camera->SubpixelOrder(), &(shared_images[i*camera->ImageSize()]));
image_buffer[i].image->HoldBuffer(true); /* Don't release the internal buffer or replace it with another */
image_buffer[i] = new Image(width, height, camera->Colours(), camera->SubpixelOrder(), &(shared_images[i*camera->ImageSize()]));
image_buffer[i]->HoldBuffer(true); /* Don't release the internal buffer or replace it with another */
}
if (purpose == CAPTURE) {
@ -1096,16 +1094,10 @@ bool Monitor::disconnect() {
}
#endif // ZM_MEM_MAPPED
if (image_buffer) {
for ( int32_t i = 0; i < image_buffer_count; i++ ) {
// We delete the image because it is an object pointing to space that won't be free'd.
delete image_buffer[i].image;
image_buffer[i].image = nullptr;
// We don't delete the timestamp because it is just a pointer to shared mem.
image_buffer[i].timestamp = nullptr;
}
delete[] image_buffer;
image_buffer = nullptr;
delete image_buffer[i];
image_buffer[i] = nullptr;
}
return true;
@ -1177,19 +1169,18 @@ int Monitor::GetImage(int32_t index, int scale) {
Image *image;
// If we are going to be modifying the snapshot before writing, then we need to copy it
if ( ( scale != ZM_SCALE_BASE ) || ( !config.timestamp_on_capture ) ) {
ZMPacket *snap = &image_buffer[index];
alarm_image.Assign(*snap->image);
alarm_image.Assign(*image_buffer[index]);
if ( scale != ZM_SCALE_BASE ) {
if (scale != ZM_SCALE_BASE) {
alarm_image.Scale(scale);
}
if ( !config.timestamp_on_capture ) {
TimestampImage(&alarm_image, snap->timestamp);
TimestampImage(&alarm_image, shared_timestamps[index]);
}
image = &alarm_image;
} else {
image = image_buffer[index].image;
image = image_buffer[index];
}
static char filename[PATH_MAX];
@ -1206,15 +1197,15 @@ ZMPacket *Monitor::getSnapshot(int index) const {
if ( (index < 0) || (index > image_buffer_count) ) {
index = shared_data->last_write_index;
}
return &image_buffer[index];
return new ZMPacket(image_buffer[index], shared_timestamps[index]);
return nullptr;
}
struct timeval Monitor::GetTimestamp(int index) const {
ZMPacket *packet = getSnapshot(index);
if ( packet )
return *packet->timestamp;
if (packet)
return packet->timestamp;
static struct timeval null_tv = { 0, 0 };
return null_tv;
@ -1235,58 +1226,6 @@ uint64_t Monitor::GetLastEventId() const {
// This function is crap.
double Monitor::GetFPS() const {
return get_capture_fps();
// last_write_index is the last capture index. It starts as == image_buffer_count so that the first asignment % image_buffer_count = 0;
int32_t index1 = shared_data->last_write_index;
if ( index1 >= image_buffer_count ) {
// last_write_index only has this value on startup before capturing anything.
return 0.0;
}
Debug(2, "index1(%d)", index1);
ZMPacket *snap1 = &image_buffer[index1];
if ( !snap1->timestamp->tv_sec ) {
// This should be impossible
Warning("Impossible situation. No timestamp on captured image index was %d, image-buffer_count was (%d)", index1, image_buffer_count);
return 0.0;
}
struct timeval time1 = *snap1->timestamp;
int32_t fps_image_count = image_buffer_count;
int32_t index2 = (index1+1)%image_buffer_count;
Debug(2, "index2(%d)", index2);
ZMPacket *snap2 = &image_buffer[index2];
// the timestamp pointers are initialized on connection, so that's redundant
// tv_sec is probably only zero during the first loop of capturing, so this basically just counts the unused images.
// The problem is that there is no locking, and we set the timestamp before we set last_write_index,
// so there is a small window where the next image can have a timestamp in the future
while ( !snap2->timestamp->tv_sec || tvDiffSec(*snap2->timestamp, *snap1->timestamp) < 0 ) {
if ( index1 == index2 ) {
// All images are uncaptured
return 0.0;
}
index2 = (index2+1)%image_buffer_count;
snap2 = &image_buffer[ index2 ];
fps_image_count--;
}
struct timeval time2 = *snap2->timestamp;
double time_diff = tvDiffSec( time2, time1 );
if ( ! time_diff ) {
Error("No diff between time_diff = %lf (%d:%ld.%ld - %d:%ld.%ld), ibc: %d",
time_diff, index2, time2.tv_sec, time2.tv_usec, index1, time1.tv_sec, time1.tv_usec, image_buffer_count);
return 0.0;
}
double curr_fps = fps_image_count/time_diff;
if ( curr_fps < 0.0 ) {
Error("Negative FPS %f, time_diff = %lf (%d:%ld.%ld - %d:%ld.%ld), ibc: %d",
curr_fps, time_diff, index2, time2.tv_sec, time2.tv_usec, index1, time1.tv_sec, time1.tv_usec, image_buffer_count);
return 0.0;
} else {
Debug(2, "GetFPS %f, time_diff = %lf (%d:%ld.%ld - %d:%ld.%ld), ibc: %d",
curr_fps, time_diff, index2, time2.tv_sec, time2.tv_usec, index1, time1.tv_sec, time1.tv_usec, image_buffer_count);
}
return curr_fps;
}
/* I think this returns the # of micro seconds that we should sleep in order to maintain the desired analysis rate */
@ -1772,7 +1711,7 @@ bool Monitor::Analyse() {
// get_analysis_packet will lock the packet and may wait if analysis_it is at the end
ZMLockedPacket *packet_lock = packetqueue.get_packet(analysis_it);
if (!packet_lock) return false;
ZMPacket *snap = packet_lock->packet_;
std::shared_ptr<ZMPacket> snap = packet_lock->packet_;
// Is it possible for snap->score to be ! -1 ? Not if everything is working correctly
if (snap->score != -1) {
@ -1908,7 +1847,7 @@ bool Monitor::Analyse() {
}
} // end if decoding enabled
struct timeval *timestamp = snap->timestamp;
struct timeval *timestamp = &snap->timestamp;
if (Active() and (function == MODECT or function == MOCORD)) {
Debug(3, "signal and active and modect");
@ -2004,7 +1943,7 @@ bool Monitor::Analyse() {
// This gets a lock on the starting packet
ZMLockedPacket *starting_packet_lock = nullptr;
ZMPacket *starting_packet = nullptr;
std::shared_ptr<ZMPacket> starting_packet = nullptr;
if (*start_it != snap_it) {
starting_packet_lock = packetqueue.get_packet(start_it);
if (!starting_packet_lock) {
@ -2017,7 +1956,7 @@ bool Monitor::Analyse() {
starting_packet = snap;
}
event = new Event(this, *(starting_packet->timestamp), "Continuous", noteSetMap);
event = new Event(this, starting_packet->timestamp, "Continuous", noteSetMap);
// Write out starting packets, do not modify packetqueue it will garbage collect itself
while (starting_packet and ((*start_it) != snap_it)) {
event->AddPacket(starting_packet);
@ -2106,7 +2045,7 @@ bool Monitor::Analyse() {
(pre_event_count > alarm_frame_count ? pre_event_count : alarm_frame_count)
);
ZMLockedPacket *starting_packet_lock = nullptr;
ZMPacket *starting_packet = nullptr;
std::shared_ptr<ZMPacket> starting_packet = nullptr;
if (*start_it != snap_it) {
starting_packet_lock = packetqueue.get_packet(start_it);
if (!starting_packet_lock) return false;
@ -2115,7 +2054,7 @@ bool Monitor::Analyse() {
starting_packet = snap;
}
event = new Event(this, *(starting_packet->timestamp), cause, noteSetMap);
event = new Event(this, starting_packet->timestamp, cause, noteSetMap);
shared_data->last_event_id = event->Id();
snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile());
video_store_data->recording = event->StartTime();
@ -2522,13 +2461,17 @@ std::vector<std::shared_ptr<Monitor>> Monitor::LoadFfmpegMonitors(const char *fi
int Monitor::Capture() {
unsigned int index = image_count % image_buffer_count;
ZMPacket *packet = new ZMPacket();
packet->timestamp = new struct timeval;
Debug(1, "Packeet");
std::shared_ptr<ZMPacket> packet = std::make_shared<ZMPacket>();
//= new ZMPacket();
//packet->timestamp = new struct timeval;
packet->image_index = image_count;
gettimeofday(packet->timestamp, nullptr);
shared_data->zmc_heartbeat_time = packet->timestamp->tv_sec;
int captureResult = camera->Capture(*packet);
Debug(1, "Packeet");
gettimeofday(&(packet->timestamp), nullptr);
Debug(1, "Packeet");
shared_data->zmc_heartbeat_time = packet->timestamp.tv_sec;
Debug(1, "Capturing");
int captureResult = camera->Capture(packet);
Debug(4, "Back from capture result=%d image count %d", captureResult, image_count);
if (captureResult < 0) {
@ -2543,24 +2486,24 @@ int Monitor::Capture() {
capture_image->Fill(signalcolor);
shared_data->signal = false;
shared_data->last_write_index = index;
shared_data->last_write_time = image_buffer[index].timestamp->tv_sec;
image_buffer[index].image->Assign(*capture_image);
*(image_buffer[index].timestamp) = *(packet->timestamp);
shared_data->last_write_time = shared_timestamps[index].tv_sec;
image_buffer[index]->Assign(*capture_image);
shared_timestamps[index] = packet->timestamp;
delete capture_image;
image_count++;
delete packet;
//delete packet;
// What about timestamping it?
// Don't want to do analysis on it, but we won't due to signal
return -1;
} else if ( captureResult > 0 ) {
} else if (captureResult > 0) {
shared_data->signal = true; // Assume if getting packets that we are getting something useful. CheckSignalPoints can correct this later.
// If we captured, let's assume signal, Decode will detect further
if (!decoding_enabled) {
shared_data->last_write_index = index;
shared_data->last_write_time = packet->timestamp->tv_sec;
shared_data->last_write_time = packet->timestamp.tv_sec;
}
Debug(2, "Have packet stream_index:%d ?= videostream_id: %d q.vpktcount %d event? %d image_count %d",
packet->packet.stream_index, video_stream_id, packetqueue.packet_count(video_stream_id), ( event ? 1 : 0 ), image_count );
packet->packet.stream_index, video_stream_id, packetqueue.packet_count(video_stream_id), ( event ? 1 : 0 ), image_count);
if (packet->codec_type == AVMEDIA_TYPE_VIDEO) {
packet->packet.stream_index = video_stream_id; // Convert to packetQueue's index
@ -2590,14 +2533,14 @@ int Monitor::Capture() {
packetqueue.queuePacket(packet);
} else {
Debug(4, "Not Queueing audio packet");
delete packet;
//delete packet;
}
// Don't update last_write_index because that is used for live streaming
//shared_data->last_write_time = image_buffer[index].timestamp->tv_sec;
return 1;
} else {
Debug(1, "Unknown codec type %d", packet->codec_type);
delete packet;
//delete packet;
return 1;
} // end if audio
@ -2605,12 +2548,12 @@ int Monitor::Capture() {
// Will only be queued if there are iterators allocated in the queue.
if ( !packetqueue.queuePacket(packet) ) {
delete packet;
//delete packet;
}
UpdateCaptureFPS();
} else { // result == 0
// Question is, do we update last_write_index etc?
delete packet;
//delete packet;
return 0;
} // end if result
@ -2635,7 +2578,7 @@ int Monitor::Capture() {
bool Monitor::Decode() {
ZMLockedPacket *packet_lock = packetqueue.get_packet(decoder_it);
if (!packet_lock) return false;
ZMPacket *packet = packet_lock->packet_;
std::shared_ptr<ZMPacket> packet = packet_lock->packet_;
packetqueue.increment_it(decoder_it);
if (packet->codec_type != AVMEDIA_TYPE_VIDEO) {
Debug(4, "Not video");
@ -2758,25 +2701,25 @@ bool Monitor::Decode() {
TimestampImage(packet->image, packet->timestamp);
}
image_buffer[index].image->Assign(*(packet->image));
*(image_buffer[index].timestamp) = *(packet->timestamp);
image_buffer[index]->Assign(*(packet->image));
shared_timestamps[index] = packet->timestamp;
} // end if have image
packet->decoded = true;
shared_data->signal = ( capture_image and signal_check_points ) ? CheckSignal(capture_image) : true;
shared_data->last_write_index = index;
shared_data->last_write_time = packet->timestamp->tv_sec;
shared_data->last_write_time = packet->timestamp.tv_sec;
packetqueue.unlock(packet_lock);
return true;
} // end bool Monitor::Decode()
void Monitor::TimestampImage(Image *ts_image, const struct timeval *ts_time) const {
void Monitor::TimestampImage(Image *ts_image, const timeval &ts_time) const {
if ( !label_format[0] )
return;
// Expand the strftime macros first
char label_time_text[256];
tm ts_tm = {};
strftime(label_time_text, sizeof(label_time_text), label_format.c_str(), localtime_r(&ts_time->tv_sec, &ts_tm));
strftime(label_time_text, sizeof(label_time_text), label_format.c_str(), localtime_r(&ts_time.tv_sec, &ts_tm));
char label_text[1024];
const char *s_ptr = label_time_text;
char *d_ptr = label_text;
@ -2793,7 +2736,7 @@ void Monitor::TimestampImage(Image *ts_image, const struct timeval *ts_time) con
found_macro = true;
break;
case 'f' :
d_ptr += snprintf(d_ptr, sizeof(label_text)-(d_ptr-label_text), "%02ld", ts_time->tv_usec/10000);
d_ptr += snprintf(d_ptr, sizeof(label_text)-(d_ptr-label_text), "%02ld", ts_time.tv_usec/10000);
found_macro = true;
break;
}
@ -3175,21 +3118,21 @@ void Monitor::get_ref_image() {
Debug(1, "Waiting for capture daemon lastwriteindex(%d) lastwritetime(%" PRIi64 ")",
shared_data->last_write_index, static_cast<int64>(shared_data->last_write_time));
if ( snap_lock and ! snap_lock->packet_->image ) {
if (snap_lock and ! snap_lock->packet_->image) {
delete snap_lock;
// can't analyse it anyways, incremement
packetqueue.increment_it(analysis_it);
}
//usleep(10000);
}
if ( zm_terminate )
if (zm_terminate)
return;
ZMPacket *snap = snap_lock->packet_;
std::shared_ptr<ZMPacket> snap = snap_lock->packet_;
Debug(1, "get_ref_image: packet.stream %d ?= video_stream %d, packet image id %d packet image %p",
snap->packet.stream_index, video_stream_id, snap->image_index, snap->image );
// Might not have been decoded yet FIXME
if ( snap->image ) {
if (snap->image) {
ref_image.Assign(width, height, camera->Colours(),
camera->SubpixelOrder(), snap->image->Buffer(), camera->ImageSize());
Debug(2, "Have ref image about to unlock");
@ -3197,7 +3140,7 @@ void Monitor::get_ref_image() {
Debug(2, "Have no ref image about to unlock");
}
delete snap_lock;
}
} // get_ref_image
std::vector<Group *> Monitor::Groups() {
// At the moment, only load groups once.

View File

@ -376,8 +376,7 @@ protected:
struct timeval *shared_timestamps;
unsigned char *shared_images;
ZMPacket *image_buffer;
ZMPacket next_buffer; /* Used by four field deinterlacing */
std::vector<Image *> image_buffer;
int video_stream_id; // will be filled in PrimeCapture
int audio_stream_id; // will be filled in PrimeCapture
@ -583,7 +582,7 @@ public:
bool Analyse();
bool Decode();
void DumpImage( Image *dump_image ) const;
void TimestampImage( Image *ts_image, const struct timeval *ts_time ) const;
void TimestampImage(Image *ts_image, const timeval &ts_time) const;
void closeEvent();
void Reload();

View File

@ -320,13 +320,13 @@ void MonitorStream::processCommand(const CmdMsg *msg) {
//updateFrameRate(monitor->GetFPS());
} // end void MonitorStream::processCommand(const CmdMsg *msg)
bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp) {
bool MonitorStream::sendFrame(const char *filepath, const timeval &timestamp) {
bool send_raw = ((scale>=ZM_SCALE_BASE)&&(zoom==ZM_SCALE_BASE));
if (
( type != STREAM_JPEG )
||
( (!config.timestamp_on_capture) && timestamp )
(!config.timestamp_on_capture)
)
send_raw = false;
@ -352,7 +352,7 @@ bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp) {
if (
(0 > fprintf(stdout, "Content-Length: %d\r\nX-Timestamp: %d.%06d\r\n\r\n",
img_buffer_size, (int)timestamp->tv_sec, (int)timestamp->tv_usec))
img_buffer_size, (int)timestamp.tv_sec, (int)timestamp.tv_usec))
||
(fwrite(img_buffer, img_buffer_size, 1, stdout) != 1)
) {
@ -379,9 +379,9 @@ bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp) {
return false;
} // end bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp)
bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
bool MonitorStream::sendFrame(Image *image, const timeval &timestamp) {
Image *send_image = prepareImage(image);
if ( !config.timestamp_on_capture && timestamp )
if (!config.timestamp_on_capture)
monitor->TimestampImage(send_image, timestamp);
fputs("--" BOUNDARY "\r\n", stdout);
@ -395,8 +395,8 @@ bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
static struct timeval base_time;
struct DeltaTimeval delta_time;
if ( !frame_count )
base_time = *timestamp;
DELTA_TIMEVAL(delta_time, *timestamp, base_time, DT_PREC_3);
base_time = timestamp;
DELTA_TIMEVAL(delta_time, timestamp, base_time, DT_PREC_3);
/* double pts = */ vid_stream->EncodeFrame(send_image->Buffer(), send_image->Size(), config.mpeg_timed_frames, delta_time.delta);
} else
#endif // HAVE_LIBAVCODEC
@ -437,7 +437,7 @@ bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
}
if (
( 0 > fprintf(stdout, "Content-Length: %d\r\nX-Timestamp: %d.%06d\r\n\r\n",
img_buffer_size, (int)timestamp->tv_sec, (int)timestamp->tv_usec) )
img_buffer_size, (int)timestamp.tv_sec, (int)timestamp.tv_usec) )
||
(fwrite(img_buffer, img_buffer_size, 1, stdout) != 1)
) {
@ -462,7 +462,7 @@ bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
} // Not mpeg
last_frame_sent = TV_2_FLOAT(now);
return true;
} // end bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp )
} // end bool MonitorStream::sendFrame( Image *image, const timeval &timestamp )
void MonitorStream::runStream() {
if (type == STREAM_SINGLE) {
@ -595,8 +595,8 @@ void MonitorStream::runStream() {
if ( !was_paused ) {
int index = monitor->shared_data->last_write_index % monitor->image_buffer_count;
Debug(1, "Saving paused image from index %d",index);
paused_image = new Image(*monitor->image_buffer[index].image);
paused_timestamp = *(monitor->image_buffer[index].timestamp);
paused_image = new Image(*monitor->image_buffer[index]);
paused_timestamp = monitor->shared_timestamps[index];
}
} else if ( paused_image ) {
Debug(1, "Clearing paused_image");
@ -635,7 +635,7 @@ void MonitorStream::runStream() {
if ( temp_index%frame_mod == 0 ) {
Debug(2, "Sending delayed frame %d", temp_index);
// Send the next frame
if ( ! sendFrame(temp_image_buffer[temp_index].file_name, &temp_image_buffer[temp_index].timestamp) ) {
if (!sendFrame(temp_image_buffer[temp_index].file_name, temp_image_buffer[temp_index].timestamp)) {
zm_terminate = true;
}
memcpy(&last_frame_timestamp, &(swap_image->timestamp), sizeof(last_frame_timestamp));
@ -652,7 +652,7 @@ void MonitorStream::runStream() {
// Send the next frame
if ( !sendFrame(
temp_image_buffer[temp_read_index].file_name,
&temp_image_buffer[temp_read_index].timestamp
temp_image_buffer[temp_read_index].timestamp
) ) {
zm_terminate = true;
}
@ -672,7 +672,7 @@ void MonitorStream::runStream() {
// Send keepalive
Debug(2, "Sending keepalive frame %d", temp_index);
// Send the next frame
if ( !sendFrame(temp_image_buffer[temp_index].file_name, &temp_image_buffer[temp_index].timestamp) ) {
if ( !sendFrame(temp_image_buffer[temp_index].file_name, temp_image_buffer[temp_index].timestamp) ) {
zm_terminate = true;
}
// frame_sent = true;
@ -701,21 +701,21 @@ void MonitorStream::runStream() {
index, frame_mod, frame_count, paused, delayed);
// Send the next frame
//
ZMPacket *snap = &monitor->image_buffer[index];
// Perhaps we should use NOW instead.
last_frame_timestamp = monitor->shared_timestamps[index];
Image *image = monitor->image_buffer[index];
if ( !sendFrame(snap->image, snap->timestamp) ) {
if ( !sendFrame(image, last_frame_timestamp) ) {
Debug(2, "sendFrame failed, quiting.");
zm_terminate = true;
break;
}
// Perhaps we should use NOW instead.
last_frame_timestamp = *(snap->timestamp);
//frame_sent = true;
//
if ( frame_count == 0 ) {
// Chrome will not display the first frame until it receives another.
// Firefox is fine. So just send the first frame twice.
if ( !sendFrame(snap->image, snap->timestamp) ) {
if ( !sendFrame(image, last_frame_timestamp) ) {
Debug(2, "sendFrame failed, quiting.");
zm_terminate = true;
break;
@ -730,9 +730,9 @@ void MonitorStream::runStream() {
}
if ( last_zoom != zoom ) {
Debug(2, "Sending 2 frames because change in zoom %d ?= %d", last_zoom, zoom);
if ( !sendFrame(paused_image, &paused_timestamp) )
if (!sendFrame(paused_image, paused_timestamp))
zm_terminate = true;
if ( !sendFrame(paused_image, &paused_timestamp) )
if (!sendFrame(paused_image, paused_timestamp))
zm_terminate = true;
} else {
double actual_delta_time = TV_2_FLOAT(now) - last_frame_sent;
@ -742,7 +742,7 @@ void MonitorStream::runStream() {
Debug(2, "Sending keepalive frame because delta time %.2f > 5",
actual_delta_time);
// Send the next frame
if ( !sendFrame(paused_image, &paused_timestamp) )
if (!sendFrame(paused_image, paused_timestamp))
zm_terminate = true;
} else {
Debug(2, "Would have sent keepalive frame, but had no paused_image");
@ -767,7 +767,7 @@ void MonitorStream::runStream() {
temp_image_buffer[temp_index].valid = true;
}
temp_image_buffer[temp_index].timestamp = monitor->shared_timestamps[index];
monitor->image_buffer[index].image->WriteJpeg(
monitor->image_buffer[index]->WriteJpeg(
temp_image_buffer[temp_index].file_name,
config.jpeg_file_quality
);
@ -865,8 +865,7 @@ void MonitorStream::SingleImage(int scale) {
}
int index = monitor->shared_data->last_write_index % monitor->image_buffer_count;
Debug(1, "write index: %d %d", monitor->shared_data->last_write_index, index);
ZMPacket *snap = &(monitor->image_buffer[index]);
Image *snap_image = snap->image;
Image *snap_image = monitor->image_buffer[index];
if ( scale != ZM_SCALE_BASE ) {
scaled_image.Assign(*snap_image);
@ -874,7 +873,7 @@ void MonitorStream::SingleImage(int scale) {
snap_image = &scaled_image;
}
if ( !config.timestamp_on_capture ) {
monitor->TimestampImage(snap_image, snap->timestamp);
monitor->TimestampImage(snap_image, monitor->shared_timestamps[index]);
}
snap_image->EncodeJpeg(img_buffer, &img_buffer_size);

View File

@ -45,8 +45,8 @@ class MonitorStream : public StreamBase {
protected:
bool checkSwapPath(const char *path, bool create_path);
bool sendFrame(const char *filepath, struct timeval *timestamp);
bool sendFrame(Image *image, struct timeval *timestamp);
bool sendFrame(const char *filepath, const timeval &timestamp);
bool sendFrame(Image *image, const timeval &timestamp);
void processCommand(const CmdMsg *msg) override;
void SingleImage(int scale=100);
void SingleImageRaw(int scale=100);

View File

@ -31,7 +31,7 @@ ZMPacket::ZMPacket() :
stream(nullptr),
in_frame(nullptr),
out_frame(nullptr),
timestamp(nullptr),
timestamp({}),
buffer(nullptr),
image(nullptr),
analysis_image(nullptr),
@ -41,6 +41,28 @@ ZMPacket::ZMPacket() :
codec_imgsize(0),
pts(0),
decoded(0)
{
Debug(1, "ZMPacket");
av_init_packet(&packet);
packet.size = 0; // So we can detect whether it has been filled.
Debug(1, "ZMPacket");
}
ZMPacket::ZMPacket(Image *i, const timeval &tv) :
keyframe(0),
stream(nullptr),
in_frame(nullptr),
out_frame(nullptr),
timestamp(tv),
buffer(nullptr),
image(i),
analysis_image(nullptr),
score(-1),
codec_type(AVMEDIA_TYPE_UNKNOWN),
image_index(-1),
codec_imgsize(0),
pts(0),
decoded(0)
{
av_init_packet(&packet);
packet.size = 0; // So we can detect whether it has been filled.
@ -51,7 +73,7 @@ ZMPacket::ZMPacket(ZMPacket &p) :
stream(nullptr),
in_frame(nullptr),
out_frame(nullptr),
timestamp(nullptr),
timestamp(p.timestamp),
buffer(nullptr),
image(nullptr),
analysis_image(nullptr),
@ -68,18 +90,15 @@ ZMPacket::ZMPacket(ZMPacket &p) :
if ( zm_av_packet_ref(&packet, &p.packet) < 0 ) {
Error("error refing packet");
}
timestamp = new struct timeval;
*timestamp = *p.timestamp;
}
ZMPacket::~ZMPacket() {
zm_av_packet_unref(&packet);
if ( in_frame ) av_frame_free(&in_frame);
if ( out_frame ) av_frame_free(&out_frame);
if ( buffer ) av_freep(&buffer);
if ( analysis_image ) delete analysis_image;
if ( image ) delete image;
if ( timestamp ) delete timestamp;
if (in_frame) av_frame_free(&in_frame);
if (out_frame) av_frame_free(&out_frame);
if (buffer) av_freep(&buffer);
if (analysis_image) delete analysis_image;
if (image) delete image;
}
/* returns < 0 on error, 0 on not ready, int bytes consumed on success
@ -227,7 +246,7 @@ AVPacket *ZMPacket::set_packet(AVPacket *p) {
Error("error refing packet");
}
//ZM_DUMP_PACKET(packet, "zmpacket:");
gettimeofday(timestamp, nullptr);
gettimeofday(&timestamp, nullptr);
keyframe = p->flags & AV_PKT_FLAG_KEY;
return &packet;
}

View File

@ -41,6 +41,7 @@ class ZMPacket {
public:
std::mutex mutex_;
// The condition has to be in the packet because it is shared between locks
std::condition_variable condition_;
int keyframe;
@ -48,7 +49,7 @@ class ZMPacket {
AVPacket packet; // Input packet, undecoded
AVFrame *in_frame; // Input image, decoded Theoretically only filled if needed.
AVFrame *out_frame; // output image, Only filled if needed.
struct timeval *timestamp;
timeval timestamp;
uint8_t *buffer; // buffer used in image
Image *image;
Image *analysis_image;
@ -69,7 +70,7 @@ class ZMPacket {
int is_keyframe() { return keyframe; };
int decode( AVCodecContext *ctx );
explicit ZMPacket(Image *image);
explicit ZMPacket(Image *image, const timeval &tv);
explicit ZMPacket(ZMPacket &packet);
ZMPacket();
~ZMPacket();
@ -81,11 +82,11 @@ class ZMPacket {
class ZMLockedPacket {
public:
ZMPacket *packet_;
std::shared_ptr<ZMPacket> packet_;
std::unique_lock<std::mutex> lck_;
bool locked;
explicit ZMLockedPacket(ZMPacket *p) :
explicit ZMLockedPacket(std::shared_ptr<ZMPacket> p) :
packet_(p),
lck_(packet_->mutex_, std::defer_lock),
locked(false) {

View File

@ -75,8 +75,7 @@ PacketQueue::~PacketQueue() {
* Thus it will ensure that the same packet never gets queued twice.
*/
bool PacketQueue::queuePacket(ZMPacket* add_packet) {
Debug(4, "packetqueue queuepacket %p %d", add_packet, add_packet->image_index);
bool PacketQueue::queuePacket(std::shared_ptr<ZMPacket> add_packet) {
if (iterators.empty()) {
Debug(4, "No iterators so no one needs us to queue packets.");
return false;
@ -97,7 +96,7 @@ bool PacketQueue::queuePacket(ZMPacket* add_packet) {
if (add_packet->keyframe) {
// Have a new keyframe, so delete everything
while ((*pktQueue.begin() != add_packet) and (packet_counts[video_stream_id] > max_video_packet_count)) {
ZMPacket *zm_packet = *pktQueue.begin();
std::shared_ptr <ZMPacket>zm_packet = *pktQueue.begin();
ZMLockedPacket *lp = new ZMLockedPacket(zm_packet);
if (!lp->trylock()) {
Debug(1, "Found locked packet when trying to free up video packets. Can't continue");
@ -129,7 +128,6 @@ bool PacketQueue::queuePacket(ZMPacket* add_packet) {
packet_counts[video_stream_id],
max_video_packet_count,
pktQueue.size());
delete zm_packet;
} // end while
}
} // end if too many video packets
@ -166,7 +164,7 @@ bool PacketQueue::queuePacket(ZMPacket* add_packet) {
return true;
} // end bool PacketQueue::queuePacket(ZMPacket* zm_packet)
void PacketQueue::clearPackets(ZMPacket *add_packet) {
void PacketQueue::clearPackets(const std::shared_ptr<ZMPacket> &add_packet) {
// Only do queueCleaning if we are adding a video keyframe, so that we guarantee that there is one.
// No good. Have to satisfy two conditions:
// 1. packetqueue starts with a video keyframe
@ -211,7 +209,7 @@ void PacketQueue::clearPackets(ZMPacket *add_packet) {
if (!keep_keyframes) {
// If not doing passthrough, we don't care about starting with a keyframe so logic is simpler
while ((*pktQueue.begin() != add_packet) and (packet_counts[video_stream_id] > pre_event_video_packet_count + tail_count)) {
ZMPacket *zm_packet = *pktQueue.begin();
std::shared_ptr<ZMPacket> zm_packet = *pktQueue.begin();
ZMLockedPacket *lp = new ZMLockedPacket(zm_packet);
if (!lp->trylock()) break;
delete lp;
@ -231,7 +229,7 @@ void PacketQueue::clearPackets(ZMPacket *add_packet) {
packet_counts[video_stream_id],
pre_event_video_packet_count,
pktQueue.size());
delete zm_packet;
//delete zm_packet;
} // end while
return;
}
@ -241,7 +239,11 @@ void PacketQueue::clearPackets(ZMPacket *add_packet) {
int video_packets_to_delete = 0; // This is a count of how many packets we will delete so we know when to stop looking
// First packet is special because we know it is a video keyframe and only need to check for lock
ZMPacket *zm_packet = *it;
std::shared_ptr<ZMPacket> zm_packet = *it;
if (zm_packet == add_packet) {
return;
}
Debug(1, "trying lock on first packet");
ZMLockedPacket *lp = new ZMLockedPacket(zm_packet);
if (lp->trylock()) {
@ -288,10 +290,10 @@ void PacketQueue::clearPackets(ZMPacket *add_packet) {
( *it == add_packet ),
( next_front == pktQueue.begin() )
);
if ( next_front != pktQueue.begin() ) {
while ( pktQueue.begin() != next_front ) {
ZMPacket *zm_packet = *pktQueue.begin();
if ( !zm_packet ) {
if (next_front != pktQueue.begin()) {
while (pktQueue.begin() != next_front) {
std::shared_ptr<ZMPacket> zm_packet = *pktQueue.begin();
if (!zm_packet) {
Error("NULL zm_packet in queue");
continue;
}
@ -306,7 +308,7 @@ void PacketQueue::clearPackets(ZMPacket *add_packet) {
pktQueue.size());
pktQueue.pop_front();
packet_counts[zm_packet->packet.stream_index] -= 1;
delete zm_packet;
//delete zm_packet;
}
} // end if have at least max_video_packet_count video packets remaining
// We signal on every packet because someday we may analyze sound
@ -314,116 +316,6 @@ void PacketQueue::clearPackets(ZMPacket *add_packet) {
return;
} // end voidPacketQueue::clearPackets(ZMPacket* zm_packet)
ZMLockedPacket* PacketQueue::popPacket( ) {
Debug(4, "pktQueue size %zu", pktQueue.size());
if ( pktQueue.empty() ) {
return nullptr;
}
Debug(4, "poPacket Mutex locking");
std::unique_lock<std::mutex> lck(mutex);
ZMPacket *zm_packet = pktQueue.front();
for (
std::list<packetqueue_iterator *>::iterator iterators_it = iterators.begin();
iterators_it != iterators.end();
++iterators_it
) {
packetqueue_iterator *iterator_it = *iterators_it;
// Have to check each iterator and make sure it doesn't point to the packet we are about to delete
if ( *(*iterator_it) == zm_packet ) {
Debug(4, "Bumping it because it is at the front that we are deleting");
++(*iterators_it);
}
} // end foreach iterator
ZMLockedPacket *lp = new ZMLockedPacket (zm_packet);
lp->lock();
pktQueue.pop_front();
packet_counts[zm_packet->packet.stream_index] -= 1;
return lp;
} // popPacket
/* Keeps frames_to_keep frames of the provided stream, which theoretically is the video stream
* Basically it starts at the end, moving backwards until it finds the minimum video frame.
* Then it should probably move forward to find a keyframe. The first video frame must always be a keyframe.
* So really frames_to_keep is a maximum which isn't so awesome.. maybe we should go back farther to find the keyframe in which case
* frames_to_keep in a minimum
*/
unsigned int PacketQueue::clear(unsigned int frames_to_keep, int stream_id) {
Debug(3, "Clearing all but %d frames, queue has %zu", frames_to_keep, pktQueue.size());
if ( pktQueue.empty() ) {
return 0;
}
// If size is <= frames_to_keep since it could contain audio, we can't possibly do anything
if ( pktQueue.size() <= frames_to_keep ) {
return 0;
}
Debug(5, "Locking in clear");
std::unique_lock<std::mutex> lck(mutex);
packetqueue_iterator it = pktQueue.end()--; // point to last element instead of end
ZMPacket *zm_packet = nullptr;
while ( (it != pktQueue.begin()) and frames_to_keep ) {
zm_packet = *it;
AVPacket *av_packet = &(zm_packet->packet);
Debug(3, "Looking at packet with stream index (%d) with keyframe(%d), Image_index(%d) frames_to_keep is (%d)",
av_packet->stream_index, zm_packet->keyframe, zm_packet->image_index, frames_to_keep );
// Want frames_to_keep video keyframes. Otherwise, we may not have enough
if ( av_packet->stream_index == stream_id ) {
frames_to_keep --;
}
it --;
}
// Either at beginning or frames_to_keep == 0
if ( it == pktQueue.begin() ) {
if ( frames_to_keep ) {
Warning("Couldn't remove any packets, needed %d", frames_to_keep);
}
mutex.unlock();
return 0;
}
int delete_count = 0;
// Else not at beginning, are pointing at packet before the last video packet
while ( pktQueue.begin() != it ) {
Debug(4, "Deleting a packet from the front, count is (%d), queue size is %zu",
delete_count, pktQueue.size());
zm_packet = pktQueue.front();
for (
std::list<packetqueue_iterator *>::iterator iterators_it = iterators.begin();
iterators_it != iterators.end();
++iterators_it
) {
packetqueue_iterator *iterator_it = *iterators_it;
// Have to check each iterator and make sure it doesn't point to the packet we are about to delete
if ( *(*iterator_it) == zm_packet ) {
Debug(4, "Bumping it because it is at the front that we are deleting");
++(*iterators_it);
}
} // end foreach iterator
packet_counts[zm_packet->packet.stream_index] --;
pktQueue.pop_front();
//if ( zm_packet->image_index == -1 )
delete zm_packet;
delete_count += 1;
} // while our iterator is not the first packet
Debug(3, "Deleted %d packets, %zu remaining", delete_count, pktQueue.size());
return delete_count;
} // end unsigned int PacketQueue::clear( unsigned int frames_to_keep, int stream_id )
void PacketQueue::clear() {
deleting = true;
condition.notify_all();
@ -431,13 +323,13 @@ void PacketQueue::clear() {
std::unique_lock<std::mutex> lck(mutex);
while (!pktQueue.empty()) {
ZMPacket *packet = pktQueue.front();
std::shared_ptr<ZMPacket> packet = pktQueue.front();
// Someone might have this packet, but not for very long and since we have locked the queue they won't be able to get another one
ZMLockedPacket *lp = new ZMLockedPacket(packet);
lp->lock();
pktQueue.pop_front();
delete lp;
delete packet;
//delete packet;
}
for (
@ -449,119 +341,32 @@ void PacketQueue::clear() {
*iterator_it = pktQueue.begin();
} // end foreach iterator
if ( packet_counts ) delete[] packet_counts;
if (packet_counts) delete[] packet_counts;
packet_counts = nullptr;
max_stream_id = -1;
condition.notify_all();
}
// clear queue keeping only specified duration of video -- return number of pkts removed
unsigned int PacketQueue::clear(struct timeval *duration, int streamId) {
if ( pktQueue.empty() ) {
return 0;
}
Debug(4, "Locking in clear");
std::unique_lock<std::mutex> lck(mutex);
struct timeval keep_from;
std::list<ZMPacket *>::reverse_iterator it = pktQueue.rbegin();
struct timeval *t = (*it)->timestamp;
timersub(t, duration, &keep_from);
++it;
Debug(3, "Looking for frame before queue keep time with stream id (%d), queue has %zu packets",
streamId, pktQueue.size());
for ( ; it != pktQueue.rend(); ++it) {
ZMPacket *zm_packet = *it;
AVPacket *av_packet = &(zm_packet->packet);
if (
(av_packet->stream_index == streamId)
and
timercmp(zm_packet->timestamp, &keep_from, <=)
) {
Debug(3, "Found frame before keep time with stream index %d at %" PRIi64 ".%" PRIi64,
av_packet->stream_index,
static_cast<int64>(zm_packet->timestamp->tv_sec),
static_cast<int64>(zm_packet->timestamp->tv_usec));
break;
}
}
if ( it == pktQueue.rend() ) {
Debug(1, "Didn't find a frame before queue preserve time. keeping all");
mutex.unlock();
return 0;
}
Debug(3, "Looking for keyframe");
for ( ; it != pktQueue.rend(); ++it) {
ZMPacket *zm_packet = *it;
AVPacket *av_packet = &(zm_packet->packet);
if (
(av_packet->flags & AV_PKT_FLAG_KEY)
and
(av_packet->stream_index == streamId)
) {
Debug(3, "Found keyframe before start with stream index %d at %" PRIi64 ".%" PRIi64,
av_packet->stream_index,
static_cast<int64>(zm_packet->timestamp->tv_sec),
static_cast<int64>(zm_packet->timestamp->tv_usec));
break;
}
}
if ( it == pktQueue.rend() ) {
Debug(1, "Didn't find a keyframe before event starttime. keeping all" );
return 0;
}
unsigned int deleted_frames = 0;
ZMPacket *zm_packet = nullptr;
while ( distance(it, pktQueue.rend()) > 1 ) {
zm_packet = pktQueue.front();
for (
std::list<packetqueue_iterator *>::iterator iterators_it = iterators.begin();
iterators_it != iterators.end();
++iterators_it
) {
packetqueue_iterator *iterator_it = *iterators_it;
// Have to check each iterator and make sure it doesn't point to the packet we are about to delete
if ( *(*iterator_it) == zm_packet ) {
Debug(4, "Bumping it because it is at the front that we are deleting");
++(*iterators_it);
}
} // end foreach iterator
pktQueue.pop_front();
packet_counts[zm_packet->packet.stream_index] -= 1;
delete zm_packet;
deleted_frames += 1;
}
Debug(3, "Deleted %d frames", deleted_frames);
return deleted_frames;
}
unsigned int PacketQueue::size() {
return pktQueue.size();
}
int PacketQueue::packet_count(int stream_id) {
if ( stream_id < 0 or stream_id > max_stream_id ) {
if (stream_id < 0 or stream_id > max_stream_id) {
Error("Invalid stream_id %d max is %d", stream_id, max_stream_id);
return -1;
}
return packet_counts[stream_id];
} // end int PacketQueue::packet_count(int stream_id)
// Returns a packet. Packet will be locked
ZMLockedPacket *PacketQueue::get_packet(packetqueue_iterator *it) {
if (deleting or zm_terminate)
return nullptr;
Debug(4, "Locking in get_packet using it %p queue end? %d, packet %p",
std::addressof(*it), (*it == pktQueue.end()), *(*it));
Debug(4, "Locking in get_packet using it %p queue end? %d",
std::addressof(*it), (*it == pktQueue.end()));
std::unique_lock<std::mutex> lck(mutex);
Debug(4, "Have Lock in get_packet");
@ -582,13 +387,13 @@ ZMLockedPacket *PacketQueue::get_packet(packetqueue_iterator *it) {
return nullptr;
}
ZMPacket *p = *(*it);
std::shared_ptr<ZMPacket> p = *(*it);
if (!p) {
Error("Null p?!");
return nullptr;
}
Debug(4, "get_packet using it %p locking index %d, packet %p",
std::addressof(*it), p->image_index, p);
Debug(4, "get_packet using it %p locking index %d",
std::addressof(*it), p->image_index);
// Packets are only deleted by packetqueue, so lock must be held.
// We shouldn't have to trylock. Someone else might hold the lock but not for long
@ -655,7 +460,7 @@ packetqueue_iterator *PacketQueue::get_event_start_packet_it(
iterators.push_back(it);
*it = snapshot_it;
ZMPacket *packet = *(*it);
std::shared_ptr<ZMPacket> packet = *(*it);
ZM_DUMP_PACKET(packet->packet, "");
// Step one count back pre_event_count frames as the minimum
// Do not assume that snapshot_it is video
@ -702,9 +507,9 @@ packetqueue_iterator *PacketQueue::get_event_start_packet_it(
} // end packetqueue_iterator *PacketQueue::get_event_start_packet_it
void PacketQueue::dumpQueue() {
std::list<ZMPacket *>::reverse_iterator it;
std::list<std::shared_ptr<ZMPacket>>::reverse_iterator it;
for ( it = pktQueue.rbegin(); it != pktQueue.rend(); ++ it ) {
ZMPacket *zm_packet = *it;
std::shared_ptr<ZMPacket> zm_packet = *it;
ZM_DUMP_PACKET(zm_packet->packet, "");
}
}
@ -732,8 +537,8 @@ packetqueue_iterator * PacketQueue::get_video_it(bool wait) {
}
}
while ( *it != pktQueue.end() ) {
ZMPacket *zm_packet = *(*it);
while (*it != pktQueue.end()) {
std::shared_ptr<ZMPacket> zm_packet = *(*it);
if (!zm_packet) {
Error("Null zmpacket in queue!?");
free_it(it);
@ -764,19 +569,19 @@ void PacketQueue::free_it(packetqueue_iterator *it) {
}
}
bool PacketQueue::is_there_an_iterator_pointing_to_packet(ZMPacket *zm_packet) {
bool PacketQueue::is_there_an_iterator_pointing_to_packet(const std::shared_ptr<ZMPacket> &zm_packet) {
for (
std::list<packetqueue_iterator *>::iterator iterators_it = iterators.begin();
iterators_it != iterators.end();
++iterators_it
) {
packetqueue_iterator *iterator_it = *iterators_it;
if ( *iterator_it == pktQueue.end() ) {
if (*iterator_it == pktQueue.end()) {
continue;
}
Debug(4, "Checking iterator %p == packet ? %d", std::addressof(*iterator_it), ( *(*iterator_it) == zm_packet ));
// Have to check each iterator and make sure it doesn't point to the packet we are about to delete
if ( *(*iterator_it) == zm_packet ) {
if (*(*iterator_it) == zm_packet) {
return true;
}
} // end foreach iterator
@ -786,13 +591,13 @@ bool PacketQueue::is_there_an_iterator_pointing_to_packet(ZMPacket *zm_packet) {
void PacketQueue::setMaxVideoPackets(int p) {
max_video_packet_count = p;
Debug(1, "Setting max_video_packet_count to %d", p);
if ( max_video_packet_count < 0 )
if (max_video_packet_count < 0)
max_video_packet_count = 0 ;
}
void PacketQueue::setPreEventVideoPackets(int p) {
pre_event_video_packet_count = p;
Debug(1, "Setting pre_event_video_packet_count to %d", p);
if ( pre_event_video_packet_count < 1 )
if (pre_event_video_packet_count < 1)
pre_event_video_packet_count = 1;
// We can simplify a lot of logic in queuePacket if we can assume at least 1 packet in queue
}

View File

@ -26,12 +26,12 @@
class ZMPacket;
class ZMLockedPacket;
typedef std::list<ZMPacket *>::iterator packetqueue_iterator;
typedef std::list<std::shared_ptr<ZMPacket>>::iterator packetqueue_iterator;
class PacketQueue {
public: // For now just to ease development
std::list<ZMPacket *> pktQueue;
std::list<ZMPacket *>::iterator analysis_it;
std::list<std::shared_ptr<ZMPacket>> pktQueue;
std::list<std::shared_ptr<ZMPacket>>::iterator analysis_it;
int video_stream_id;
int max_video_packet_count; // allow a negative value to someday mean unlimited
@ -49,27 +49,21 @@ class PacketQueue {
public:
PacketQueue();
virtual ~PacketQueue();
std::list<ZMPacket *>::const_iterator end() const { return pktQueue.end(); }
std::list<ZMPacket *>::const_iterator begin() const { return pktQueue.begin(); }
std::list<std::shared_ptr<ZMPacket>>::const_iterator end() const { return pktQueue.end(); }
std::list<std::shared_ptr<ZMPacket>>::const_iterator begin() const { return pktQueue.begin(); }
int addStream();
void setMaxVideoPackets(int p);
void setPreEventVideoPackets(int p);
void setKeepKeyframes(bool k) { keep_keyframes = k; };
bool queuePacket(ZMPacket* packet);
ZMLockedPacket * popPacket();
bool popVideoPacket(ZMPacket* packet);
bool popAudioPacket(ZMPacket* packet);
unsigned int clear(unsigned int video_frames_to_keep, int stream_id);
unsigned int clear(struct timeval *duration, int streamid);
bool queuePacket(std::shared_ptr<ZMPacket> packet);
void clear();
void dumpQueue();
unsigned int size();
unsigned int get_packet_count(int stream_id) const { return packet_counts[stream_id]; };
void clear_unwanted_packets(timeval *recording, int pre_event_count, int mVideoStreamId);
void clearPackets(ZMPacket *);
void clearPackets(const std::shared_ptr<ZMPacket> &packet);
int packet_count(int stream_id);
bool increment_it(packetqueue_iterator *it);
@ -83,7 +77,7 @@ class PacketQueue {
packetqueue_iterator snapshot_it,
unsigned int pre_event_count
);
bool is_there_an_iterator_pointing_to_packet(ZMPacket *zm_packet);
bool is_there_an_iterator_pointing_to_packet(const std::shared_ptr<ZMPacket> &zm_packet);
void unlock(ZMLockedPacket *lp);
};

View File

@ -83,7 +83,7 @@ public:
virtual int Disconnect() = 0;
virtual int PreCapture() override { return 0; };
virtual int PrimeCapture() override { return 0; };
virtual int Capture(ZMPacket &p) override = 0;
virtual int Capture(std::shared_ptr<ZMPacket> &p) override = 0;
virtual int PostCapture() override = 0;
int Read(int fd, char*buf, int size);
};

View File

@ -1069,7 +1069,7 @@ int RemoteCameraHttp::PreCapture() {
return 1;
} // end int RemoteCameraHttp::PreCapture()
int RemoteCameraHttp::Capture(ZMPacket &packet) {
int RemoteCameraHttp::Capture(std::shared_ptr<ZMPacket> &packet) {
int content_length = GetResponse();
if (content_length == 0) {
Warning("Unable to capture image, retrying");
@ -1080,15 +1080,15 @@ int RemoteCameraHttp::Capture(ZMPacket &packet) {
return -1;
}
if (!packet.image) {
if (!packet->image) {
Debug(4, "Allocating image");
packet.image = new Image(width, height, colours, subpixelorder);
packet->image = new Image(width, height, colours, subpixelorder);
}
Image *image = packet.image;
packet.keyframe = 1;
packet.codec_type = AVMEDIA_TYPE_VIDEO;
packet.packet.stream_index = mVideoStreamId;
packet.stream = mVideoStream;
Image *image = packet->image;
packet->keyframe = 1;
packet->codec_type = AVMEDIA_TYPE_VIDEO;
packet->packet.stream_index = mVideoStreamId;
packet->stream = mVideoStream;
switch (format) {
case JPEG :

View File

@ -69,7 +69,7 @@ public:
int GetResponse();
int PrimeCapture() override;
int PreCapture() override;
int Capture( ZMPacket &p ) override;
int Capture(std::shared_ptr<ZMPacket> &p) override;
int PostCapture() override;
int Close() override { Disconnect(); return 0; };
};

View File

@ -184,9 +184,9 @@ int RemoteCameraNVSocket::PrimeCapture() {
return 0;
}
int RemoteCameraNVSocket::Capture( ZMPacket &zm_packet ) {
if ( SendRequest("GetNextImage\n") < 0 ) {
Warning( "Unable to capture image, retrying" );
int RemoteCameraNVSocket::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
if (SendRequest("GetNextImage\n") < 0) {
Warning("Unable to capture image, retrying");
return 0;
}
int bytes_read = Read(sd, buffer, imagesize);
@ -195,17 +195,17 @@ int RemoteCameraNVSocket::Capture( ZMPacket &zm_packet ) {
return 0;
}
uint32_t end;
if ( Read(sd, (char *) &end , sizeof(end)) < 0 ) {
if (Read(sd, (char *) &end , sizeof(end)) < 0) {
Warning("Unable to capture image, retrying");
return 0;
}
if ( end != 0xFFFFFFFF) {
if (end != 0xFFFFFFFF) {
Warning("End Bytes Failed\n");
return 0;
}
zm_packet.image->Assign(width, height, colours, subpixelorder, buffer, imagesize);
zm_packet.keyframe = 1;
zm_packet->image->Assign(width, height, colours, subpixelorder, buffer, imagesize);
zm_packet->keyframe = 1;
return 1;
}

View File

@ -54,7 +54,7 @@ public:
int SendRequest(std::string);
int GetResponse();
int PrimeCapture() override;
int Capture(ZMPacket &p) override;
int Capture(std::shared_ptr<ZMPacket> &p) override;
int PostCapture() override;
int Close() override { return 0; };
};

View File

@ -218,16 +218,16 @@ int RemoteCameraRtsp::PreCapture() {
return 1;
}
int RemoteCameraRtsp::Capture(ZMPacket &zm_packet) {
int RemoteCameraRtsp::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
int frameComplete = false;
AVPacket *packet = &zm_packet.packet;
if ( !zm_packet.image ) {
AVPacket *packet = &zm_packet->packet;
if ( !zm_packet->image ) {
Debug(1, "Allocating image %dx%d %d colours %d", width, height, colours, subpixelorder);
zm_packet.image = new Image(width, height, colours, subpixelorder);
zm_packet->image = new Image(width, height, colours, subpixelorder);
}
while ( !frameComplete ) {
while (!frameComplete) {
buffer.clear();
if (!rtspThread || rtspThread->IsStopped())
return -1;
@ -254,7 +254,7 @@ int RemoteCameraRtsp::Capture(ZMPacket &zm_packet) {
continue;
} else if ( nalType == 5 ) {
packet->flags |= AV_PKT_FLAG_KEY;
zm_packet.keyframe = 1;
zm_packet->keyframe = 1;
// IDR
buffer += lastSps;
buffer += lastPps;
@ -275,14 +275,14 @@ int RemoteCameraRtsp::Capture(ZMPacket &zm_packet) {
gettimeofday(&now, NULL);
packet->pts = packet->dts = now.tv_sec*1000000+now.tv_usec;
int bytes_consumed = zm_packet.decode(mVideoCodecContext);
int bytes_consumed = zm_packet->decode(mVideoCodecContext);
if ( bytes_consumed < 0 ) {
Error("Error while decoding frame %d", frameCount);
//Hexdump(Logger::ERROR, buffer.head(), buffer.size()>256?256:buffer.size());
}
buffer -= packet->size;
if ( bytes_consumed ) {
zm_dump_video_frame(zm_packet.in_frame, "remote_rtsp_decode");
zm_dump_video_frame(zm_packet->in_frame, "remote_rtsp_decode");
if ( ! mVideoStream->
#if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
codecpar
@ -293,18 +293,18 @@ int RemoteCameraRtsp::Capture(ZMPacket &zm_packet) {
zm_dump_codec(mVideoCodecContext);
#if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
zm_dump_codecpar(mVideoStream->codecpar);
mVideoStream->codecpar->width = zm_packet.in_frame->width;
mVideoStream->codecpar->height = zm_packet.in_frame->height;
mVideoStream->codecpar->width = zm_packet->in_frame->width;
mVideoStream->codecpar->height = zm_packet->in_frame->height;
#else
mVideoStream->codec->width = zm_packet.in_frame->width;
mVideoStream->codec->height = zm_packet.in_frame->height;
mVideoStream->codec->width = zm_packet->in_frame->width;
mVideoStream->codec->height = zm_packet->in_frame->height;
#endif
#if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
zm_dump_codecpar(mVideoStream->codecpar);
#endif
}
zm_packet.codec_type = mVideoCodecContext->codec_type;
zm_packet.stream = mVideoStream;
zm_packet->codec_type = mVideoCodecContext->codec_type;
zm_packet->stream = mVideoStream;
frameComplete = true;
Debug(2, "Frame: %d - %d/%d", frameCount, bytes_consumed, buffer.size());
packet->data = nullptr;

View File

@ -79,7 +79,7 @@ public:
int PrimeCapture() override;
int PreCapture() override;
int Capture(ZMPacket &p) override;
int Capture(std::shared_ptr <ZMPacket> &p) override;
int PostCapture() override;
int Close() override { return 0; };

View File

@ -1002,17 +1002,17 @@ bool VideoStore::setup_resampler() {
#endif
} // end bool VideoStore::setup_resampler()
int VideoStore::writePacket(ZMPacket *ipkt) {
if ( ipkt->codec_type == AVMEDIA_TYPE_VIDEO ) {
int VideoStore::writePacket(const std::shared_ptr<ZMPacket> &ipkt) {
if (ipkt->codec_type == AVMEDIA_TYPE_VIDEO) {
return writeVideoFramePacket(ipkt);
} else if ( ipkt->codec_type == AVMEDIA_TYPE_AUDIO ) {
} else if (ipkt->codec_type == AVMEDIA_TYPE_AUDIO) {
return writeAudioFramePacket(ipkt);
}
Error("Unknown stream type in packet (%d)", ipkt->codec_type);
return 0;
}
int VideoStore::writeVideoFramePacket(ZMPacket *zm_packet) {
int VideoStore::writeVideoFramePacket(const std::shared_ptr<ZMPacket> &zm_packet) {
int ret;
frame_count += 1;
@ -1056,8 +1056,8 @@ int VideoStore::writeVideoFramePacket(ZMPacket *zm_packet) {
swscale.Convert(zm_packet->in_frame, out_frame);
} else {
Error("Have neither in_frame or image in packet %p %d!",
zm_packet, zm_packet->image_index);
Error("Have neither in_frame or image in packet %d!",
zm_packet->image_index);
return 0;
} // end if has packet or image
} else {
@ -1104,13 +1104,13 @@ int VideoStore::writeVideoFramePacket(ZMPacket *zm_packet) {
frame->pkt_duration = 0;
#endif
int64_t in_pts = zm_packet->timestamp->tv_sec * (uint64_t)1000000 + zm_packet->timestamp->tv_usec;
int64_t in_pts = zm_packet->timestamp.tv_sec * (uint64_t)1000000 + zm_packet->timestamp.tv_usec;
if ( !video_first_pts ) {
video_first_pts = in_pts;
Debug(2, "No video_first_pts, set to (%" PRId64 ") secs(%" PRIi64 ") usecs(%" PRIi64 ")",
video_first_pts,
static_cast<int64>(zm_packet->timestamp->tv_sec),
static_cast<int64>(zm_packet->timestamp->tv_usec));
static_cast<int64>(zm_packet->timestamp.tv_sec),
static_cast<int64>(zm_packet->timestamp.tv_usec));
frame->pts = 0;
} else {
uint64_t useconds = in_pts - video_first_pts;
@ -1121,8 +1121,8 @@ int VideoStore::writeVideoFramePacket(ZMPacket *zm_packet) {
frame->pts,
video_first_pts,
useconds,
static_cast<int64>(zm_packet->timestamp->tv_sec),
static_cast<int64>(zm_packet->timestamp->tv_usec),
static_cast<int64>(zm_packet->timestamp.tv_sec),
static_cast<int64>(zm_packet->timestamp.tv_usec),
video_out_ctx->time_base.num,
video_out_ctx->time_base.den);
}
@ -1228,7 +1228,7 @@ int VideoStore::writeVideoFramePacket(ZMPacket *zm_packet) {
return 1;
} // end int VideoStore::writeVideoFramePacket( AVPacket *ipkt )
int VideoStore::writeAudioFramePacket(ZMPacket *zm_packet) {
int VideoStore::writeAudioFramePacket(const std::shared_ptr<ZMPacket> &zm_packet) {
AVPacket *ipkt = &zm_packet->packet;
int ret;

View File

@ -6,6 +6,8 @@
#include "zm_ffmpeg.h"
#include "zm_swscale.h"
#include <memory>
extern "C" {
#ifdef HAVE_LIBSWRESAMPLE
#include "libswresample/swresample.h"
@ -119,9 +121,9 @@ class VideoStore {
void write_video_packet(AVPacket &pkt);
void write_audio_packet(AVPacket &pkt);
int writeVideoFramePacket(ZMPacket *pkt);
int writeAudioFramePacket(ZMPacket *pkt);
int writePacket(ZMPacket *pkt);
int writeVideoFramePacket(const std::shared_ptr<ZMPacket> &pkt);
int writeAudioFramePacket(const std::shared_ptr<ZMPacket> &pkt);
int writePacket(const std::shared_ptr<ZMPacket> &pkt);
int write_packets(PacketQueue &queue);
void flush_codecs();
};

View File

@ -109,7 +109,7 @@ void Zone::Setup(
} // end Zone::Setup
Zone::~Zone() {
if ( image )
if (image)
delete image;
delete pg_image;
delete[] ranges;

View File

@ -301,6 +301,7 @@ int main(int argc, char *argv[]) {
result = -1;
break;
}
Debug(1, "Capture:");
if (monitors[i]->Capture() < 0) {
Error("Failed to capture image from monitor %d %s (%zu/%zu)",
monitors[i]->Id(), monitors[i]->Name(), i + 1, monitors.size());

@ -1 +1 @@
Subproject commit 14292374ccf1328f2d5db20897bd06f99ba4d938
Subproject commit 0bd63fb464957080ead342db58ca9e01532cf1ef