Monitor: Convert API to std::chrono
This commit is contained in:
parent dff5452f11
commit 707975e567
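
The change replaces raw struct timeval / time_t plumbing with the std::chrono aliases the diff uses throughout (SystemTimePoint, Seconds, Microseconds, FPSeconds) plus zm::chrono::duration_cast for timeval interop at the shared-memory boundary. Below is a minimal sketch of that conversion pattern; the typedefs and the two helpers are assumptions standing in for the project's own zm_time.h utilities, not the actual ZoneMinder code.

    #include <chrono>
    #include <sys/time.h>   // struct timeval still appears at the shared-memory boundary

    // Assumed aliases, modelled on the names the diff uses (zm_time.h in the real tree).
    typedef std::chrono::system_clock::time_point SystemTimePoint;
    typedef std::chrono::seconds Seconds;
    typedef std::chrono::microseconds Microseconds;
    typedef std::chrono::duration<double> FPSeconds;  // fractional seconds, handy for logging

    // timeval -> SystemTimePoint: widen to Microseconds, then wrap in a time_point
    // (the diff spells this SystemTimePoint(zm::chrono::duration_cast<Microseconds>(tv))).
    inline SystemTimePoint FromTimeval(const timeval &tv) {
      return SystemTimePoint(Seconds(tv.tv_sec) + Microseconds(tv.tv_usec));
    }

    // SystemTimePoint -> timeval: split the epoch offset into whole seconds and the
    // microsecond remainder (the diff uses zm::chrono::duration_cast<timeval>(...) for this).
    inline timeval ToTimeval(SystemTimePoint tp) {
      Microseconds us = std::chrono::duration_cast<Microseconds>(tp.time_since_epoch());
      Seconds s = std::chrono::duration_cast<Seconds>(us);
      timeval tv;
      tv.tv_sec = static_cast<time_t>(s.count());
      tv.tv_usec = static_cast<suseconds_t>((us - s).count());
      return tv;
    }
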
@@ -295,7 +295,7 @@ bool Event::WriteFrameImage(Image *image, SystemTimePoint timestamp, const char
     // stash the image we plan to use in another pointer regardless if timestamped.
     // exif is only timestamp at present this switches on or off for write
     Image *ts_image = new Image(*image);
-    monitor->TimestampImage(ts_image, zm::chrono::duration_cast<timeval>(timestamp.time_since_epoch()));
+    monitor->TimestampImage(ts_image, timestamp);
     rc = ts_image->WriteJpeg(event_file, thisquality, jpeg_timestamp);
     delete ts_image;
   } else {

@@ -421,8 +421,6 @@ void Event::AddPacket(const std::shared_ptr<ZMPacket>&packet) {
         have_video_keyframe, packet->codec_type, (packet->codec_type == AVMEDIA_TYPE_VIDEO), packet->keyframe);
   ZM_DUMP_PACKET(packet->packet, "Adding to event");

-  SystemTimePoint packet_ts = SystemTimePoint(zm::chrono::duration_cast<Microseconds>(packet->timestamp));
-
   if (videoStore) {
     if (have_video_keyframe) {
       videoStore->writePacket(packet);

@@ -433,9 +431,9 @@ void Event::AddPacket(const std::shared_ptr<ZMPacket>&packet) {
   }

   if ((packet->codec_type == AVMEDIA_TYPE_VIDEO) or packet->image) {
-    AddFrame(packet->image, packet_ts, packet->zone_stats, packet->score, packet->analysis_image);
+    AddFrame(packet->image, packet->timestamp, packet->zone_stats, packet->score, packet->analysis_image);
   }
-  end_time = packet_ts;
+  end_time = packet->timestamp;
 }

 void Event::WriteDbFrames() {

@@ -269,4 +269,4 @@ AVFrame *FFmpeg_Input::get_frame(int stream_id, double at) {
   }

   return get_frame(stream_id);
-} // end AVFrame *FFmpeg_Input::get_frame( int stream_id, struct timeval at)
+}

@@ -569,8 +569,8 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
   else if (alarm_frame_count > MAX_PRE_ALARM_FRAMES) alarm_frame_count = MAX_PRE_ALARM_FRAMES;

   /* "SectionLength, MinSectionLength, FrameSkip, MotionFrameSkip, " */
-  section_length = atoi(dbrow[col]); col++;
-  min_section_length = atoi(dbrow[col]); col++;
+  section_length = Seconds(atoi(dbrow[col])); col++;
+  min_section_length = Seconds(atoi(dbrow[col])); col++;
   frame_skip = atoi(dbrow[col]); col++;
   motion_frame_skip = atoi(dbrow[col]); col++;

@@ -1170,7 +1170,7 @@ int Monitor::GetImage(int32_t index, int scale) {
     }

     if (!config.timestamp_on_capture) {
-      TimestampImage(&alarm_image, shared_timestamps[index]);
+      TimestampImage(&alarm_image, SystemTimePoint(zm::chrono::duration_cast<Microseconds>(shared_timestamps[index])));
     }
     image = &alarm_image;
   } else {

@@ -1192,20 +1192,20 @@ ZMPacket *Monitor::getSnapshot(int index) const {
     return nullptr;
   }
   if (index != image_buffer_count) {
-    return new ZMPacket(image_buffer[index], shared_timestamps[index]);
+    return new ZMPacket(image_buffer[index],
+                        SystemTimePoint(zm::chrono::duration_cast<Microseconds>(shared_timestamps[index])));
   } else {
     Error("Unable to generate image, no images in buffer");
   }
   return nullptr;
 }

-struct timeval Monitor::GetTimestamp(int index) const {
+SystemTimePoint Monitor::GetTimestamp(int index) const {
   ZMPacket *packet = getSnapshot(index);
   if (packet)
     return packet->timestamp;

-  static struct timeval null_tv = { 0, 0 };
-  return null_tv;
+  return {};
 }

 unsigned int Monitor::GetLastReadIndex() const {

@@ -1859,7 +1859,7 @@ bool Monitor::Analyse() {
     }
   } // end if decoding enabled

-  struct timeval *timestamp = &snap->timestamp;
+  SystemTimePoint timestamp = snap->timestamp;

   if (Active() and (function == MODECT or function == MOCORD)) {
     Debug(3, "signal and active and modect");

@@ -1922,23 +1922,18 @@ bool Monitor::Analyse() {
       if (event) {
         Debug(2, "Have event %" PRIu64 " in record", event->Id());

-        if (section_length &&
-            (( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= section_length)
-            && (
-              ((function == MOCORD) && (event_close_mode != CLOSE_TIME))
-              ||
-              ( (function == RECORD) && (event_close_mode == CLOSE_TIME) )
-              || ! ( timestamp->tv_sec % section_length )
-              )
-            ) {
-          Info("%s: %03d - Closing event %" PRIu64 ", section end forced %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %d",
+        if (section_length != Seconds(0) && (timestamp - GetVideoWriterStartTime() >= section_length)
+            && ((function == MOCORD && event_close_mode != CLOSE_TIME)
+                || (function == RECORD && event_close_mode == CLOSE_TIME)
+                || timestamp.time_since_epoch() % section_length == Seconds(0))) {
+          Info("%s: %03d - Closing event %" PRIu64 ", section end forced %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %" PRIi64 ,
               name.c_str(),
               image_count,
               event->Id(),
-              static_cast<int64>(timestamp->tv_sec),
-              static_cast<int64>(video_store_data->recording.tv_sec),
-              static_cast<int64>(timestamp->tv_sec - video_store_data->recording.tv_sec),
-              section_length);
+              static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch()).count()),
+              static_cast<int64>(std::chrono::duration_cast<Seconds>(GetVideoWriterStartTime().time_since_epoch()).count()),
+              static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp - GetVideoWriterStartTime()).count()),
+              static_cast<int64>(Seconds(section_length).count()));
           closeEvent();
         } // end if section_length
       } // end if event
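
With section_length and min_section_length now stored as Seconds, the section-end test above becomes plain duration arithmetic: subtracting two SystemTimePoints yields a duration that compares directly against section_length, and operator% on durations is the duration form of the old tv_sec % section_length boundary test. A simplified sketch of just that arithmetic follows; it omits the function/event_close_mode clauses, and section_expired and its parameters are hypothetical names, not ZoneMinder's.

    #include <chrono>

    typedef std::chrono::system_clock::time_point SystemTimePoint;
    typedef std::chrono::seconds Seconds;

    // Hypothetical free function mirroring the section-end arithmetic in Monitor::Analyse().
    bool section_expired(SystemTimePoint timestamp,
                         SystemTimePoint video_writer_start_time,
                         Seconds section_length) {
      if (section_length == Seconds(0))
        return false;                                    // a zero section length disables the check
      // time_point - time_point gives a duration; it compares directly against Seconds.
      bool ran_full_section = (timestamp - video_writer_start_time) >= section_length;
      // Duration modulo, the chrono counterpart of the old "tv_sec % section_length" test.
      bool on_section_boundary = timestamp.time_since_epoch() % section_length == Seconds(0);
      return ran_full_section && on_section_boundary;
    }
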
@@ -1968,10 +1963,7 @@ bool Monitor::Analyse() {
             starting_packet = snap;
           }

-          event = new Event(this,
-                            SystemTimePoint(zm::chrono::duration_cast<Microseconds>(starting_packet->timestamp)),
-                            "Continuous",
-                            noteSetMap);
+          event = new Event(this, starting_packet->timestamp, "Continuous", noteSetMap);
           // Write out starting packets, do not modify packetqueue it will garbage collect itself
           while (starting_packet and ((*start_it) != snap_it)) {
             event->AddPacket(starting_packet);

@@ -1993,10 +1985,7 @@ bool Monitor::Analyse() {
           start_it = nullptr;
         } else {
           // Create event from current snap
-          event = new Event(this,
-                            SystemTimePoint(zm::chrono::duration_cast<Microseconds>(*timestamp)),
-                            "Continuous",
-                            noteSetMap);
+          event = new Event(this, timestamp, "Continuous", noteSetMap);
         }
         shared_data->last_event_id = event->Id();

@@ -2026,23 +2015,22 @@ bool Monitor::Analyse() {
       if ((state == IDLE) || (state == TAPE) || (state == PREALARM)) {
         // If we should end then previous continuous event and start a new non-continuous event
         if (event && event->Frames()
-            && (!event->AlarmFrames())
-            && (event_close_mode == CLOSE_ALARM)
-            && ( ( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= min_section_length )
-            && ( (!pre_event_count) || (Event::PreAlarmCount() >= alarm_frame_count-1) )
-            ) {
+            && !event->AlarmFrames()
+            && event_close_mode == CLOSE_ALARM
+            && timestamp - GetVideoWriterStartTime() >= min_section_length
+            && (!pre_event_count || Event::PreAlarmCount() >= alarm_frame_count - 1)) {
           Info("%s: %03d - Closing event %" PRIu64 ", continuous end, alarm begins",
                name.c_str(), image_count, event->Id());
           closeEvent();
         } else if (event) {
           // This is so if we need more than 1 alarm frame before going into alarm, so it is basically if we have enough alarm frames
           Debug(3,
-                "pre_alarm_count in event %d, event frames %d, alarm frames %d event length %" PRIi64 " >=? %d min",
+                "pre_alarm_count in event %d, event frames %d, alarm frames %d event length %" PRIi64 " >=? %" PRIi64 " min",
                 Event::PreAlarmCount(),
                 event->Frames(),
                 event->AlarmFrames(),
-                static_cast<int64>(timestamp->tv_sec - video_store_data->recording.tv_sec),
-                min_section_length);
+                static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp - GetVideoWriterStartTime()).count()),
+                static_cast<int64>(Seconds(min_section_length).count()));
         }
         if ((!pre_event_count) || (Event::PreAlarmCount() >= alarm_frame_count-1)) {
           // lets construct alarm cause. It will contain cause + names of zones alarmed

@@ -2073,10 +2061,7 @@ bool Monitor::Analyse() {
             starting_packet = snap;
           }

-          event = new Event(this,
-                            SystemTimePoint(zm::chrono::duration_cast<Microseconds>(starting_packet->timestamp)),
-                            cause,
-                            noteSetMap);
+          event = new Event(this, starting_packet->timestamp, cause, noteSetMap);
           shared_data->last_event_id = event->Id();
           snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile());
           SetVideoWriterStartTime(event->StartTime());

@@ -2142,11 +2127,8 @@ bool Monitor::Analyse() {
           Info("%s: %03d - Gone into alert state", name.c_str(), analysis_image_count);
           shared_data->state = state = ALERT;
         } else if (state == ALERT) {
-          if (
-              ( analysis_image_count-last_alarm_count > post_event_count )
-              &&
-              ( ( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= min_section_length )
-              ) {
+          if (analysis_image_count - last_alarm_count > post_event_count
+              && timestamp - GetVideoWriterStartTime() >= min_section_length) {
             Info("%s: %03d - Left alarm state (%" PRIu64 ") - %d(%d) images",
                  name.c_str(), analysis_image_count, event->Id(), event->Frames(), event->AlarmFrames());
             //if ( function != MOCORD || event_close_mode == CLOSE_ALARM || event->Cause() == SIGNAL_CAUSE )

@@ -2164,14 +2146,14 @@ bool Monitor::Analyse() {
             shared_data->state = state = ((function != MOCORD) ? IDLE : TAPE);
           } else {
             Debug(1,
-                  "State %s because image_count(%d)-last_alarm_count(%d) > post_event_count(%d) and timestamp.tv_sec(%" PRIi64 ") - recording.tv_src(%" PRIi64 ") >= min_section_length(%d)",
+                  "State %s because image_count(%d)-last_alarm_count(%d) > post_event_count(%d) and timestamp.tv_sec(%" PRIi64 ") - recording.tv_src(%" PRIi64 ") >= min_section_length(%" PRIi64 ")",
                   State_Strings[state].c_str(),
                   analysis_image_count,
                   last_alarm_count,
                   post_event_count,
-                  static_cast<int64>(timestamp->tv_sec),
-                  static_cast<int64>(video_store_data->recording.tv_sec),
-                  min_section_length);
+                  static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch()).count()),
+                  static_cast<int64>(std::chrono::duration_cast<Seconds>(GetVideoWriterStartTime().time_since_epoch()).count()),
+                  static_cast<int64>(Seconds(min_section_length).count()));
           }
           if (Event::PreAlarmCount())
             Event::EmptyPreAlarmFrames();

@@ -2195,10 +2177,7 @@ bool Monitor::Analyse() {

           // incremement pre alarm image count
           //have_pre_alarmed_frames ++;
-          Event::AddPreAlarmFrame(snap->image,
-                                  SystemTimePoint(zm::chrono::duration_cast<Microseconds>(*timestamp)),
-                                  score,
-                                  nullptr);
+          Event::AddPreAlarmFrame(snap->image, timestamp, score, nullptr);
         } else if (state == ALARM) {
           for (const Zone &zone : zones) {
             if (zone.Alarmed()) {

@@ -2212,19 +2191,15 @@ bool Monitor::Analyse() {
       if (event) {
         if (noteSetMap.size() > 0)
           event->updateNotes(noteSetMap);
-        if ( section_length
-             && ( ( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= section_length )
-           ) {
-          Warning("%s: %03d - event %" PRIu64 ", has exceeded desired section length. %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %d",
+        if (section_length != Seconds(0) && (timestamp - GetVideoWriterStartTime() >= section_length)) {
+          Warning("%s: %03d - event %" PRIu64 ", has exceeded desired section length. %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %" PRIi64,
                   name.c_str(), analysis_image_count, event->Id(),
-                  static_cast<int64>(timestamp->tv_sec), static_cast<int64>(video_store_data->recording.tv_sec),
-                  static_cast<int64>(timestamp->tv_sec - video_store_data->recording.tv_sec),
-                  section_length);
+                  static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch()).count()),
+                  static_cast<int64>(std::chrono::duration_cast<Seconds>(GetVideoWriterStartTime().time_since_epoch()).count()),
+                  static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp - GetVideoWriterStartTime()).count()),
+                  static_cast<int64>(Seconds(section_length).count()));
           closeEvent();
-          event = new Event(this,
-                            SystemTimePoint(zm::chrono::duration_cast<Microseconds>(*timestamp)),
-                            cause,
-                            noteSetMap);
+          event = new Event(this, timestamp, cause, noteSetMap);
           shared_data->last_event_id = event->Id();
           //set up video store data
           snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile());

@@ -2493,8 +2468,8 @@ int Monitor::Capture() {

   std::shared_ptr<ZMPacket> packet = std::make_shared<ZMPacket>();
   packet->image_index = image_count;
-  gettimeofday(&(packet->timestamp), nullptr);
-  shared_data->zmc_heartbeat_time = packet->timestamp.tv_sec;
+  packet->timestamp = std::chrono::system_clock::now();
+  shared_data->zmc_heartbeat_time = std::chrono::system_clock::to_time_t(packet->timestamp);
   int captureResult = camera->Capture(packet);
   Debug(4, "Back from capture result=%d image count %d", captureResult, image_count);

@@ -2511,7 +2486,7 @@ int Monitor::Capture() {
       shared_data->last_write_index = index;
       shared_data->last_write_time = shared_timestamps[index].tv_sec;
       image_buffer[index]->Assign(*capture_image);
-      shared_timestamps[index] = packet->timestamp;
+      shared_timestamps[index] = zm::chrono::duration_cast<timeval>(packet->timestamp.time_since_epoch());
       delete capture_image;
       image_count++;
       // What about timestamping it?

@@ -2522,7 +2497,7 @@ int Monitor::Capture() {
     // If we captured, let's assume signal, Decode will detect further
     if (!decoding_enabled) {
       shared_data->last_write_index = index;
-      shared_data->last_write_time = packet->timestamp.tv_sec;
+      shared_data->last_write_time = std::chrono::system_clock::to_time_t(packet->timestamp);
     }
     Debug(2, "Have packet stream_index:%d ?= videostream_id: %d q.vpktcount %d event? %d image_count %d",
           packet->packet.stream_index, video_stream_id, packetqueue.packet_count(video_stream_id), ( event ? 1 : 0 ), image_count);
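
Inside the process the packet clock is now pure chrono (packet->timestamp = std::chrono::system_clock::now()), but the shared-memory block still carries plain time_t fields (zmc_heartbeat_time, last_write_time) and timeval slots (shared_timestamps), so Capture() converts at that boundary. A small sketch of the time_t side of that boundary; SharedClockFields and publish_capture_time are hypothetical stand-ins, not ZoneMinder types.

    #include <chrono>
    #include <ctime>

    typedef std::chrono::system_clock::time_point SystemTimePoint;

    // Hypothetical stand-in for the time_t fields the hunks above write to.
    struct SharedClockFields {
      time_t zmc_heartbeat_time;
      time_t last_write_time;
    };

    void publish_capture_time(SharedClockFields &shared, SystemTimePoint captured_at) {
      // to_time_t truncates to whole seconds, which is all the shared time_t fields can hold.
      shared.zmc_heartbeat_time = std::chrono::system_clock::to_time_t(captured_at);
      shared.last_write_time = shared.zmc_heartbeat_time;
    }
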
@@ -2718,28 +2693,31 @@ bool Monitor::Decode() {
     }

     image_buffer[index]->Assign(*(packet->image));
-    shared_timestamps[index] = packet->timestamp;
+    shared_timestamps[index] = zm::chrono::duration_cast<timeval>(packet->timestamp.time_since_epoch());
   } // end if have image
   packet->decoded = true;
   shared_data->signal = (capture_image and signal_check_points) ? CheckSignal(capture_image) : true;
   shared_data->last_write_index = index;
-  shared_data->last_write_time = packet->timestamp.tv_sec;
+  shared_data->last_write_time = std::chrono::system_clock::to_time_t(packet->timestamp);
   packetqueue.unlock(packet_lock);
   return true;
 } // end bool Monitor::Decode()

-void Monitor::TimestampImage(Image *ts_image, const timeval &ts_time) const {
+void Monitor::TimestampImage(Image *ts_image, SystemTimePoint ts_time) const {
   if (!label_format[0])
     return;

   // Expand the strftime macros first
   char label_time_text[256];
   tm ts_tm = {};
-  strftime(label_time_text, sizeof(label_time_text), label_format.c_str(), localtime_r(&ts_time.tv_sec, &ts_tm));
+  time_t ts_time_t = std::chrono::system_clock::to_time_t(ts_time);
+  strftime(label_time_text, sizeof(label_time_text), label_format.c_str(), localtime_r(&ts_time_t, &ts_tm));

   char label_text[1024];
   const char *s_ptr = label_time_text;
   char *d_ptr = label_text;
-  while ( *s_ptr && ((d_ptr-label_text) < (unsigned int)sizeof(label_text)) ) {
+  while (*s_ptr && ((d_ptr - label_text) < (unsigned int) sizeof(label_text))) {
     if ( *s_ptr == config.timestamp_code_char[0] ) {
       bool found_macro = false;
       switch ( *(s_ptr+1) ) {

@@ -2752,7 +2730,10 @@ void Monitor::TimestampImage(Image *ts_image, const timeval &ts_time) const {
           found_macro = true;
           break;
         case 'f' :
-          d_ptr += snprintf(d_ptr, sizeof(label_text)-(d_ptr-label_text), "%02ld", ts_time.tv_usec/10000);
+          typedef std::chrono::duration<int64, std::centi> Centiseconds;
+          Centiseconds centi_sec = std::chrono::duration_cast<Centiseconds>(
+              ts_time.time_since_epoch() - std::chrono::duration_cast<Seconds>(ts_time.time_since_epoch()));
+          d_ptr += snprintf(d_ptr, sizeof(label_text) - (d_ptr - label_text), "%02ld", centi_sec.count());
           found_macro = true;
           break;
       }
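
TimestampImage() now takes a SystemTimePoint, so the %f label macro has to recover its two-digit fraction from the time_point itself: subtract the whole-second part of the epoch offset and cast the remainder to a centiseconds duration, which is what the Centiseconds typedef in the hunk above does. Here is a sketch of that extraction in isolation; format_label_time is a hypothetical helper and the aliases are assumed as before.

    #include <chrono>
    #include <cstdio>
    #include <cstdint>
    #include <ctime>
    #include <ratio>

    typedef std::chrono::system_clock::time_point SystemTimePoint;
    typedef std::chrono::seconds Seconds;

    // Hypothetical helper: render "HH:MM:SS.cc" the way the strftime + %f path above does.
    void format_label_time(SystemTimePoint ts_time, char *buf, size_t len) {
      // strftime wants a broken-down time, so go through time_t / localtime_r first.
      time_t ts_time_t = std::chrono::system_clock::to_time_t(ts_time);
      tm ts_tm = {};
      localtime_r(&ts_time_t, &ts_tm);
      char hms[64];
      strftime(hms, sizeof(hms), "%H:%M:%S", &ts_tm);

      // Sub-second remainder: epoch offset minus its whole seconds, expressed in 1/100 s.
      typedef std::chrono::duration<int64_t, std::centi> Centiseconds;
      Centiseconds centi_sec = std::chrono::duration_cast<Centiseconds>(
          ts_time.time_since_epoch() - std::chrono::duration_cast<Seconds>(ts_time.time_since_epoch()));

      snprintf(buf, len, "%s.%02lld", hms, static_cast<long long>(centi_sec.count()));
    }
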
@@ -2972,8 +2953,8 @@ bool Monitor::DumpSettings(char *output, bool verbose) {
   sprintf(output+strlen(output), "Post Event Count : %d\n", post_event_count );
   sprintf(output+strlen(output), "Stream Replay Buffer : %d\n", stream_replay_buffer );
   sprintf(output+strlen(output), "Alarm Frame Count : %d\n", alarm_frame_count );
-  sprintf(output+strlen(output), "Section Length : %d\n", section_length);
-  sprintf(output+strlen(output), "Min Section Length : %d\n", min_section_length);
+  sprintf(output+strlen(output), "Section Length : %" PRIi64 "\n", static_cast<int64>(Seconds(section_length).count()));
+  sprintf(output+strlen(output), "Min Section Length : %" PRIi64 "\n", static_cast<int64>(Seconds(min_section_length).count()));
   sprintf(output+strlen(output), "Maximum FPS : %.2f\n", capture_delay ? (double) Microseconds::period::den / capture_delay : 0.0);
   sprintf(output+strlen(output), "Alarm Maximum FPS : %.2f\n", alarm_capture_delay ? (double) Microseconds::period::den / alarm_capture_delay : 0.0);
   sprintf(output+strlen(output), "Reference Blend %%ge : %d\n", ref_blend_perc);

@@ -313,8 +313,8 @@ protected:
   int pre_event_count; // How many images to hold and prepend to an alarm event
   int post_event_count; // How many unalarmed images must occur before the alarm state is reset
   int stream_replay_buffer; // How many frames to store to support DVR functions, IGNORED from this object, passed directly into zms now
-  int section_length; // How long events should last in continuous modes
-  int min_section_length; // Minimum event length when using event_close_mode == ALARM
+  Seconds section_length; // How long events should last in continuous modes
+  Seconds min_section_length; // Minimum event length when using event_close_mode == ALARM
   bool adaptive_skip; // Whether to use the newer adaptive algorithm for this monitor
   int frame_skip; // How many frames to skip in continuous modes
   int motion_frame_skip; // How many frames to skip in motion detection

@@ -424,7 +424,6 @@ protected:

 public:
   explicit Monitor();
-  explicit Monitor(unsigned int p_id);

   ~Monitor();

@@ -453,7 +452,7 @@ public:

   inline unsigned int Id() const { return id; }
   inline const char *Name() const { return name.c_str(); }
-  inline unsigned int ServerId() { return server_id; }
+  inline unsigned int ServerId() const { return server_id; }
   inline Storage *getStorage() {
     if ( ! storage ) {
       storage = new Storage(storage_id);

@@ -486,7 +485,7 @@ public:
   }
   inline bool Exif() const { return embed_exif; }
   inline bool RTSPServer() const { return rtsp_server; }
-  inline bool RecordAudio() { return record_audio; }
+  inline bool RecordAudio() const { return record_audio; }

   /*
   inline Purpose Purpose() { return purpose };

@@ -529,14 +528,14 @@ public:
   AVStream *GetVideoStream() const { return camera ? camera->getVideoStream() : nullptr; };
   AVCodecContext *GetVideoCodecContext() const { return camera ? camera->getVideoCodecContext() : nullptr; };

-  const std::string GetSecondPath() const { return second_path; };
-  const std::string GetVideoFifoPath() const { return shared_data ? shared_data->video_fifo_path : ""; };
-  const std::string GetAudioFifoPath() const { return shared_data ? shared_data->audio_fifo_path : ""; };
-  const std::string GetRTSPStreamName() const { return rtsp_streamname; };
+  std::string GetSecondPath() const { return second_path; };
+  std::string GetVideoFifoPath() const { return shared_data ? shared_data->video_fifo_path : ""; };
+  std::string GetAudioFifoPath() const { return shared_data ? shared_data->audio_fifo_path : ""; };
+  std::string GetRTSPStreamName() const { return rtsp_streamname; };

   int GetImage(int32_t index=-1, int scale=100);
   ZMPacket *getSnapshot( int index=-1 ) const;
-  struct timeval GetTimestamp( int index=-1 ) const;
+  SystemTimePoint GetTimestamp(int index = -1) const;
   void UpdateAdaptiveSkip();
   useconds_t GetAnalysisRate();
   unsigned int GetAnalysisUpdateDelay() const { return analysis_update_delay; }

@@ -553,9 +552,11 @@ public:
   void ForceAlarmOff();
   void CancelForced();
   TriggerState GetTriggerState() const { return trigger_data ? trigger_data->trigger_state : TRIGGER_CANCEL; }
-  inline time_t getStartupTime() const { return shared_data->startup_time; }
-  inline void setStartupTime( time_t p_time ) { shared_data->startup_time = p_time; }
-  inline void setHeartbeatTime( time_t p_time ) { shared_data->zmc_heartbeat_time = p_time; }
+  SystemTimePoint GetStartupTime() const { return std::chrono::system_clock::from_time_t(shared_data->startup_time); }
+  void SetStartupTime(SystemTimePoint time) { shared_data->startup_time = std::chrono::system_clock::to_time_t(time); }
+  void SetHeartbeatTime(SystemTimePoint time) {
+    shared_data->zmc_heartbeat_time = std::chrono::system_clock::to_time_t(time);
+  }
   void get_ref_image();

   int LabelSize() const { return label_size; }

@@ -586,7 +587,7 @@ public:
   bool Analyse();
   bool Decode();
   void DumpImage( Image *dump_image ) const;
-  void TimestampImage(Image *ts_image, const timeval &ts_time) const;
+  void TimestampImage(Image *ts_image, SystemTimePoint ts_time) const;
   void closeEvent();

   void Reload();

@@ -618,7 +619,7 @@ public:
   double get_analysis_fps( ) const {
     return shared_data ? shared_data->analysis_fps : 0.0;
   }
-  int Importance() { return importance; }
+  int Importance() const { return importance; }
 };

 #define MOD_ADD( var, delta, limit ) (((var)+(limit)+(delta))%(limit))

@@ -377,12 +377,12 @@ bool MonitorStream::sendFrame(const char *filepath, SystemTimePoint timestamp) {
     return true;
   }
   return false;
-} // end bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp)
+}

 bool MonitorStream::sendFrame(Image *image, SystemTimePoint timestamp) {
   Image *send_image = prepareImage(image);
   if (!config.timestamp_on_capture) {
-    monitor->TimestampImage(send_image, zm::chrono::duration_cast<timeval>(timestamp.time_since_epoch()));
+    monitor->TimestampImage(send_image, timestamp);
   }

   fputs("--" BOUNDARY "\r\n", stdout);

@@ -461,7 +461,7 @@ bool MonitorStream::sendFrame(Image *image, SystemTimePoint timestamp) {
   } // Not mpeg
   last_frame_sent = now;
   return true;
-} // end bool MonitorStream::sendFrame( Image *image, const timeval &timestamp )
+}

 void MonitorStream::runStream() {
   if (type == STREAM_SINGLE) {

@@ -865,8 +865,9 @@ void MonitorStream::SingleImage(int scale) {
     scaled_image.Scale(scale);
     snap_image = &scaled_image;
   }
-  if ( !config.timestamp_on_capture ) {
-    monitor->TimestampImage(snap_image, monitor->shared_timestamps[index]);
+  if (!config.timestamp_on_capture) {
+    monitor->TimestampImage(snap_image,
+                            SystemTimePoint(zm::chrono::duration_cast<Microseconds>(monitor->shared_timestamps[index])));
   }
   snap_image->EncodeJpeg(img_buffer, &img_buffer_size);

@@ -21,7 +21,6 @@
 #define ZM_MONITORSTREAM_H

 #include "zm_stream.h"
-#include <sys/time.h>

 class MonitorStream : public StreamBase {
 protected:

@@ -21,7 +21,6 @@
 #include "zm_ffmpeg.h"
 #include "zm_image.h"
 #include "zm_logger.h"
-#include <sys/time.h>

 using namespace std;
 AVPixelFormat target_format = AV_PIX_FMT_NONE;

@@ -31,7 +30,6 @@ ZMPacket::ZMPacket() :
   stream(nullptr),
   in_frame(nullptr),
   out_frame(nullptr),
-  timestamp({}),
   buffer(nullptr),
   image(nullptr),
   analysis_image(nullptr),

@@ -40,13 +38,13 @@ ZMPacket::ZMPacket() :
   image_index(-1),
   codec_imgsize(0),
   pts(0),
-  decoded(0)
+  decoded(false)
 {
   av_init_packet(&packet);
   packet.size = 0; // So we can detect whether it has been filled.
 }

-ZMPacket::ZMPacket(Image *i, const timeval &tv) :
+ZMPacket::ZMPacket(Image *i, SystemTimePoint tv) :
   keyframe(0),
   stream(nullptr),
   in_frame(nullptr),

@@ -60,7 +58,7 @@ ZMPacket::ZMPacket(Image *i, const timeval &tv) :
   image_index(-1),
   codec_imgsize(0),
   pts(0),
-  decoded(0)
+  decoded(false)
 {
   av_init_packet(&packet);
   packet.size = 0; // So we can detect whether it has been filled.

@@ -80,7 +78,7 @@ ZMPacket::ZMPacket(ZMPacket &p) :
   image_index(-1),
   codec_imgsize(0),
   pts(0),
-  decoded(0)
+  decoded(false)
 {
   av_init_packet(&packet);
   packet.size = 0;

@@ -95,8 +93,8 @@ ZMPacket::~ZMPacket() {
   if (in_frame) av_frame_free(&in_frame);
   if (out_frame) av_frame_free(&out_frame);
   if (buffer) av_freep(&buffer);
-  if (analysis_image) delete analysis_image;
-  if (image) delete image;
+  delete analysis_image;
+  delete image;
 }

 /* returns < 0 on error, 0 on not ready, int bytes consumed on success

@@ -243,8 +241,8 @@ AVPacket *ZMPacket::set_packet(AVPacket *p) {
   if (zm_av_packet_ref(&packet, p) < 0) {
     Error("error refing packet");
   }
-  //ZM_DUMP_PACKET(packet, "zmpacket:");
-  gettimeofday(&timestamp, nullptr);
+  timestamp = std::chrono::system_clock::now();
   keyframe = p->flags & AV_PKT_FLAG_KEY;
   return &packet;
 }

@@ -21,6 +21,7 @@
 #define ZM_PACKET_H

 #include "zm_logger.h"
+#include "zm_time.h"
 #include "zm_zone.h"

 #include <condition_variable>

@@ -31,10 +32,6 @@ extern "C" {
 #include <libavformat/avformat.h>
 }

-#ifdef __FreeBSD__
-#include <sys/time.h>
-#endif // __FreeBSD__
-
 class Image;

 class ZMPacket {

@@ -49,7 +46,7 @@ class ZMPacket {
   AVPacket packet; // Input packet, undecoded
   AVFrame *in_frame; // Input image, decoded Theoretically only filled if needed.
   AVFrame *out_frame; // output image, Only filled if needed.
-  timeval timestamp;
+  SystemTimePoint timestamp;
   uint8_t *buffer; // buffer used in image
   Image *image;
   Image *analysis_image;

@@ -70,7 +67,7 @@ class ZMPacket {

   int is_keyframe() { return keyframe; };
   int decode( AVCodecContext *ctx );
-  explicit ZMPacket(Image *image, const timeval &tv);
+  explicit ZMPacket(Image *image, SystemTimePoint tv);
   explicit ZMPacket(ZMPacket &packet);
   ZMPacket();
   ~ZMPacket();

@@ -989,17 +989,16 @@ int VideoStore::writeVideoFramePacket(const std::shared_ptr<ZMPacket> &zm_packet
   //zm_packet->out_frame->key_frame = zm_packet->keyframe;
   frame->pkt_duration = 0;

-  SystemTimePoint packet_ts = SystemTimePoint(zm::chrono::duration_cast<Microseconds>(zm_packet->timestamp));
   if (!video_first_pts) {
-    video_first_pts = packet_ts.time_since_epoch().count();
+    video_first_pts = zm_packet->timestamp.time_since_epoch().count();
     Debug(2, "No video_first_pts, set to (%" PRId64 ") secs(%.2f)",
           video_first_pts,
-          FPSeconds(packet_ts.time_since_epoch()).count());
+          FPSeconds(zm_packet->timestamp.time_since_epoch()).count());

     frame->pts = 0;
   } else {
     Microseconds useconds = std::chrono::duration_cast<Microseconds>(
-        packet_ts - SystemTimePoint(Microseconds(video_first_pts)));
+        zm_packet->timestamp - SystemTimePoint(Microseconds(video_first_pts)));
     frame->pts = av_rescale_q(useconds.count(), AV_TIME_BASE_Q, video_out_ctx->time_base);
     Debug(2,
           "Setting pts for frame(%d) to (%" PRId64 ") from (start %" PRIu64 " - %" PRIu64 " - us(%" PRIi64 ") @ %d/%d",

@@ -1007,7 +1006,7 @@ int VideoStore::writeVideoFramePacket(const std::shared_ptr<ZMPacket> &zm_packet
           frame->pts,
           video_first_pts,
           static_cast<int64>(std::chrono::duration_cast<Microseconds>(useconds).count()),
-          static_cast<int64>(std::chrono::duration_cast<Microseconds>(packet_ts.time_since_epoch()).count()),
+          static_cast<int64>(std::chrono::duration_cast<Microseconds>(zm_packet->timestamp.time_since_epoch()).count()),
           video_out_ctx->time_base.num,
           video_out_ctx->time_base.den);
   }

@@ -242,9 +242,9 @@ int main(int argc, char *argv[]) {
   if (!monitor->connect()) {
     Warning("Couldn't connect to monitor %d", monitor->Id());
   }
-  time_t now = (time_t)time(nullptr);
-  monitor->setStartupTime(now);
-  monitor->setHeartbeatTime(now);
+  SystemTimePoint now = std::chrono::system_clock::now();
+  monitor->SetStartupTime(now);
+  monitor->SetHeartbeatTime(now);

   snprintf(sql, sizeof(sql),
            "INSERT INTO Monitor_Status (MonitorId,Status,CaptureFPS,AnalysisFPS)"

src/zmu.cpp (37 changes)

@@ -498,20 +498,27 @@ int main(int argc, char *argv[]) {
         }
       }
       if ( function & ZMU_TIME ) {
-        struct timeval timestamp = monitor->GetTimestamp(image_idx);
-        if ( verbose ) {
+        SystemTimePoint timestamp = monitor->GetTimestamp(image_idx);
+        if (verbose) {
           char timestamp_str[64] = "None";
-          if ( timestamp.tv_sec ) {
+          if (timestamp.time_since_epoch() != Seconds(0)) {
             tm tm_info = {};
-            strftime(timestamp_str, sizeof(timestamp_str), "%Y-%m-%d %H:%M:%S", localtime_r(&timestamp.tv_sec, &tm_info));
+            time_t timestamp_t = std::chrono::system_clock::to_time_t(timestamp);
+            strftime(timestamp_str, sizeof(timestamp_str), "%Y-%m-%d %H:%M:%S", localtime_r(&timestamp_t, &tm_info));
+          }
+          Seconds ts_sec = std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch());
+          Microseconds ts_usec = std::chrono::duration_cast<Microseconds>(timestamp.time_since_epoch() - ts_sec);
+          if (image_idx == -1) {
+            printf("Time of last image capture: %s.%02d\n", timestamp_str, static_cast<int32>(ts_usec.count()));
+          }
+          else {
+            printf("Time of image %d capture: %s.%02d\n", image_idx, timestamp_str, static_cast<int32>(ts_usec.count()));
           }
-          if ( image_idx == -1 )
-            printf("Time of last image capture: %s.%02ld\n", timestamp_str, timestamp.tv_usec/10000);
-          else
-            printf("Time of image %d capture: %s.%02ld\n", image_idx, timestamp_str, timestamp.tv_usec/10000);
         } else {
-          if ( have_output ) fputc(separator, stdout);
-          printf("%ld.%02ld", timestamp.tv_sec, timestamp.tv_usec/10000);
+          if (have_output) {
+            fputc(separator, stdout);
+          }
+          printf("%.2f", FPSeconds(timestamp.time_since_epoch()).count());
           have_output = true;
         }
       }
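
zmu used to print timestamp.tv_sec and tv_usec/10000 by hand; with GetTimestamp() returning a SystemTimePoint, the non-verbose path goes through FPSeconds (a double-based seconds duration) so printf can emit one fractional value, while the verbose path splits out whole seconds and a sub-second remainder. A short sketch of both output styles; print_timestamp is a hypothetical helper, and the division by 10000 follows the old tv_usec/10000 behaviour rather than the exact new printf arguments.

    #include <chrono>
    #include <cstdio>

    typedef std::chrono::system_clock::time_point SystemTimePoint;
    typedef std::chrono::seconds Seconds;
    typedef std::chrono::microseconds Microseconds;
    typedef std::chrono::duration<double> FPSeconds;

    void print_timestamp(SystemTimePoint timestamp) {
      // One-shot fractional form, like the new "%.2f" / "%14.2f" printf calls in zmu.
      printf("%.2f\n", FPSeconds(timestamp.time_since_epoch()).count());

      // Split form: whole seconds plus a two-digit hundredths remainder.
      Seconds ts_sec = std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch());
      Microseconds ts_usec = std::chrono::duration_cast<Microseconds>(timestamp.time_since_epoch() - ts_sec);
      printf("%lld.%02d\n",
             static_cast<long long>(ts_sec.count()),
             static_cast<int>(ts_usec.count() / 10000));
    }
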
@@ -740,13 +747,14 @@ int main(int argc, char *argv[]) {
       if ( monitor_function > 1 ) {
         std::shared_ptr<Monitor> monitor = Monitor::Load(monitor_id, false, Monitor::QUERY);
         if ( monitor && monitor->connect() ) {
-          struct timeval tv = monitor->GetTimestamp();
-          printf( "%4d%5d%6d%9d%11ld.%02ld%6d%6d%8" PRIu64 "%8.2f\n",
+          SystemTimePoint timestamp = monitor->GetTimestamp();
+          printf( "%4d%5d%6d%9d%14.2f%6d%6d%8" PRIu64 "%8.2f\n",
             monitor->Id(),
             monitor_function,
             monitor->GetState(),
             monitor->GetTriggerState(),
-            tv.tv_sec, tv.tv_usec/10000,
+            FPSeconds(timestamp.time_since_epoch()).count(),
             monitor->GetLastReadIndex(),
             monitor->GetLastWriteIndex(),
             monitor->GetLastEventId(),

@@ -754,13 +762,12 @@
           );
         }
       } else {
-        struct timeval tv = { 0, 0 };
         printf("%4d%5d%6d%9d%11ld.%02ld%6d%6d%8d%8.2f\n",
           mon_id,
           function,
           0,
           0,
-          tv.tv_sec, tv.tv_usec/10000,
+          0l, 0l,
           0,
           0,
           0,
|