Merge pull request #3297 from Carbenium/time-misc

Convert the rest of the codebase to std::chrono
Isaac Connor 2021-06-13 17:47:40 -04:00 committed by GitHub
commit fa11c20bf8
33 changed files with 441 additions and 471 deletions
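
The changes below follow one pattern: POSIX `timeval`/`time_t`/`usleep()` plumbing is replaced by the chrono aliases pulled in from `zm_time.h` (`Seconds`, `Milliseconds`, `Microseconds`, `FPSeconds`, `SystemTimePoint`, `TimePoint`), with conversions back to `timeval`/`time_t` only at OS and shared-memory boundaries. The header itself is not part of this diff, so the following is only a sketch of what such aliases and a `timeval` bridge could look like; the `ToTimeval` helper is a stand-in for the `zm::chrono::duration_cast<timeval>` calls seen throughout the diff.

```cpp
#include <chrono>
#include <cstdint>
#include <sys/time.h>

// Assumed aliases in the spirit of zm_time.h (not taken verbatim from the PR).
typedef std::chrono::duration<int64_t, std::micro> Microseconds;
typedef std::chrono::duration<int64_t, std::milli> Milliseconds;
typedef std::chrono::duration<int64_t> Seconds;
typedef std::chrono::duration<double> FPSeconds;  // floating-point seconds, for logging
typedef std::chrono::time_point<std::chrono::system_clock> SystemTimePoint;
typedef std::chrono::time_point<std::chrono::steady_clock> TimePoint;

// Hypothetical bridge: convert a chrono duration back to the timeval still
// required by select() and the shared-memory structs.
inline timeval ToTimeval(Microseconds d) {
  timeval tv;
  tv.tv_sec = std::chrono::duration_cast<Seconds>(d).count();
  tv.tv_usec = (d - std::chrono::duration_cast<Seconds>(d)).count();
  return tv;
}
```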

View File

@ -74,11 +74,13 @@ int Buffer::read_into(int sd, unsigned int bytes) {
return bytes_read;
}
int Buffer::read_into(int sd, unsigned int bytes, struct timeval timeout) {
int Buffer::read_into(int sd, unsigned int bytes, Microseconds timeout) {
fd_set set;
FD_ZERO(&set); /* clear the set */
FD_SET(sd, &set); /* add our file descriptor to the set */
int rv = select(sd + 1, &set, NULL, NULL, &timeout);
timeval timeout_tv = zm::chrono::duration_cast<timeval>(timeout);
int rv = select(sd + 1, &set, nullptr, nullptr, &timeout_tv);
if (rv == -1) {
Error("Error %d %s from select", errno, strerror(errno));
return rv;
@ -86,5 +88,6 @@ int Buffer::read_into(int sd, unsigned int bytes, struct timeval timeout) {
Debug(1, "timeout"); /* a timeout occured */
return 0;
}
return read_into(sd, bytes);
}
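
For reference, the select-with-chrono-timeout shape used here, reduced to a standalone function. The conversion is written out inline; the PR itself routes it through `zm::chrono::duration_cast<timeval>`, and the function name below is invented for illustration.

```cpp
#include <cerrno>
#include <chrono>
#include <cstdio>
#include <cstring>
#include <sys/select.h>
#include <sys/time.h>

using Microseconds = std::chrono::microseconds;

// Wait until fd is readable or the timeout elapses.
// Returns >0 readable, 0 timeout, -1 error (mirrors select()).
int WaitReadable(int fd, Microseconds timeout) {
  fd_set set;
  FD_ZERO(&set);
  FD_SET(fd, &set);

  timeval tv;
  tv.tv_sec = std::chrono::duration_cast<std::chrono::seconds>(timeout).count();
  tv.tv_usec = (timeout % std::chrono::seconds(1)).count();

  int rv = select(fd + 1, &set, nullptr, nullptr, &tv);
  if (rv == -1)
    std::fprintf(stderr, "select failed: %s\n", std::strerror(errno));
  return rv;
}
```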

View File

@ -21,6 +21,7 @@
#define ZM_BUFFER_H
#include "zm_logger.h"
#include "zm_time.h"
#include <cstring>
class Buffer {
@ -187,7 +188,7 @@ class Buffer {
return static_cast<int>(mSize);
}
int read_into(int sd, unsigned int bytes);
int read_into(int sd, unsigned int bytes, struct timeval timeout);
int read_into(int sd, unsigned int bytes, Microseconds timeout);
};
#endif // ZM_BUFFER_H

View File

@ -615,19 +615,7 @@ bool zm::TcpUnixServer::accept(TcpUnixSocket *&newSocket) {
return true;
}
void zm::Select::setTimeout(int timeout) {
mTimeout.tv_sec = timeout;
mTimeout.tv_usec = 0;
mHasTimeout = true;
}
void zm::Select::setTimeout(double timeout) {
mTimeout.tv_sec = int(timeout);
mTimeout.tv_usec = suseconds_t((timeout - mTimeout.tv_sec) * 1000000.0);
mHasTimeout = true;
}
void zm::Select::setTimeout(timeval timeout) {
void zm::Select::setTimeout(Microseconds timeout) {
mTimeout = timeout;
mHasTimeout = true;
}
@ -703,7 +691,7 @@ void zm::Select::clearWriters() {
}
int zm::Select::wait() {
timeval tempTimeout = mTimeout;
timeval tempTimeout = zm::chrono::duration_cast<timeval>(mTimeout);
timeval *selectTimeout = mHasTimeout ? &tempTimeout : nullptr;
fd_set rfds;

View File

@ -22,6 +22,7 @@
#include "zm_exception.h"
#include "zm_logger.h"
#include "zm_time.h"
#include <cerrno>
#include <netdb.h>
#include <set>
@ -560,13 +561,9 @@ class Select {
typedef std::vector<CommsBase *> CommsList;
Select() : mHasTimeout(false), mMaxFd(-1) {}
explicit Select(timeval timeout) : mMaxFd(-1) { setTimeout(timeout); }
explicit Select(int timeout) : mMaxFd(-1) { setTimeout(timeout); }
explicit Select(double timeout) : mMaxFd(-1) { setTimeout(timeout); }
explicit Select(Microseconds timeout) : mMaxFd(-1) { setTimeout(timeout); }
void setTimeout(int timeout);
void setTimeout(double timeout);
void setTimeout(timeval timeout);
void setTimeout(Microseconds timeout);
void clearTimeout();
void calcMaxFd();
@ -590,7 +587,7 @@ class Select {
CommsList mReadable;
CommsList mWriteable;
bool mHasTimeout;
timeval mTimeout;
Microseconds mTimeout;
int mMaxFd;
};
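
A side benefit of collapsing the three `setTimeout` overloads into one `Microseconds` parameter is that callers can hand over any integral chrono duration and get an implicit, lossless conversion. A minimal illustration; the class and member names here are stand-ins, not the real `zm::Select`.

```cpp
#include <chrono>

using Microseconds = std::chrono::microseconds;
using Milliseconds = std::chrono::milliseconds;
using Seconds = std::chrono::seconds;

// Illustrative stand-in for zm::Select's timeout handling: one chrono overload
// replaces the old int/double/timeval trio.
class SelectLike {
 public:
  void setTimeout(Microseconds timeout) { timeout_ = timeout; has_timeout_ = true; }

 private:
  Microseconds timeout_{0};
  bool has_timeout_ = false;
};

int main() {
  SelectLike s;
  s.setTimeout(Seconds(10));         // was Select(10)
  s.setTimeout(Milliseconds(1500));  // was Select(1.5)
  return 0;
}
```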

View File

@ -43,13 +43,13 @@ Event::PreAlarmData Event::pre_alarm_data[MAX_PRE_ALARM_FRAMES] = {};
Event::Event(
Monitor *p_monitor,
struct timeval p_start_time,
SystemTimePoint p_start_time,
const std::string &p_cause,
const StringSetMap &p_noteSetMap
) :
id(0),
monitor(p_monitor),
start_time(SystemTimePoint(zm::chrono::duration_cast<Microseconds>(p_start_time))),
start_time(p_start_time),
end_time(),
cause(p_cause),
noteSetMap(p_noteSetMap),
@ -220,7 +220,7 @@ Event::Event(
zmDbDo(sql.c_str());
}
} // end if GetOptVideoWriter
} // Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string &p_cause, const StringSetMap &p_noteSetMap, bool p_videoEvent )
}
Event::~Event() {
// We close the videowriter first, because if we finish the event, we might try to view the file, but we aren't done writing it yet.
@ -282,20 +282,14 @@ void Event::createNotes(std::string &notes) {
}
} // void Event::createNotes(std::string &notes)
bool Event::WriteFrameImage(
Image *image,
timeval timestamp,
const char *event_file,
bool alarm_frame) const {
bool Event::WriteFrameImage(Image *image, SystemTimePoint timestamp, const char *event_file, bool alarm_frame) const {
int thisquality =
(alarm_frame && (config.jpeg_alarm_file_quality > config.jpeg_file_quality)) ?
config.jpeg_alarm_file_quality : 0; // quality to use, zero is default
bool rc;
SystemTimePoint jpeg_timestamp =
monitor->Exif() ? SystemTimePoint(zm::chrono::duration_cast<Microseconds>(timestamp)) : SystemTimePoint();
SystemTimePoint jpeg_timestamp = monitor->Exif() ? timestamp : SystemTimePoint();
if (!config.timestamp_on_capture) {
// stash the image we plan to use in another pointer regardless of whether it is timestamped.
@ -309,7 +303,7 @@ bool Event::WriteFrameImage(
}
return rc;
} // end Event::WriteFrameImage( Image *image, struct timeval timestamp, const char *event_file, bool alarm_frame )
}
bool Event::WritePacket(const std::shared_ptr<ZMPacket>&packet) {
if (videoStore->writePacket(packet) < 0)
@ -439,7 +433,7 @@ void Event::AddPacket(const std::shared_ptr<ZMPacket>&packet) {
if ((packet->codec_type == AVMEDIA_TYPE_VIDEO) or packet->image) {
AddFrame(packet->image, packet->timestamp, packet->zone_stats, packet->score, packet->analysis_image);
}
end_time = SystemTimePoint(zm::chrono::duration_cast<Microseconds>(packet->timestamp));
end_time = packet->timestamp;
}
void Event::WriteDbFrames() {
@ -456,7 +450,7 @@ void Event::WriteDbFrames() {
frame_insert_sql += stringtf("\n( %" PRIu64 ", %d, '%s', from_unixtime( %ld ), %.2f, %d ),",
id, frame->frame_id,
frame_type_names[frame->type],
frame->timestamp.tv_sec,
std::chrono::system_clock::to_time_t(frame->timestamp),
std::chrono::duration_cast<FPSeconds>(frame->delta).count(),
frame->score);
if (config.record_event_stats and frame->zone_stats.size()) {
@ -493,13 +487,12 @@ void Event::WriteDbFrames() {
}
} // end void Event::WriteDbFrames()
void Event::AddFrame(
Image *image,
struct timeval timestamp,
const std::vector<ZoneStats> &zone_stats,
int score,
Image *alarm_image) {
if (!timestamp.tv_sec) {
void Event::AddFrame(Image *image,
SystemTimePoint timestamp,
const std::vector<ZoneStats> &zone_stats,
int score,
Image *alarm_image) {
if (timestamp.time_since_epoch() == Seconds(0)) {
Warning("Not adding new frame, zero timestamp");
return;
}
@ -576,12 +569,10 @@ void Event::AddFrame(
or ( monitor_state == Monitor::ALARM )
or ( monitor_state == Monitor::PREALARM );
SystemTimePoint timestamp_us = SystemTimePoint(zm::chrono::duration_cast<Microseconds>(timestamp));
if (db_frame) {
Microseconds delta_time = std::chrono::duration_cast<Microseconds>(timestamp_us - start_time);
Microseconds delta_time = std::chrono::duration_cast<Microseconds>(timestamp - start_time);
Debug(1, "Frame delta is %.2f s - %.2f s = %.2f s, score %u zone_stats.size %zu",
FPSeconds(timestamp_us.time_since_epoch()).count(),
FPSeconds(timestamp.time_since_epoch()).count(),
FPSeconds(start_time.time_since_epoch()).count(),
FPSeconds(delta_time).count(),
score,
@ -621,8 +612,8 @@ void Event::AddFrame(
if (score > (int) max_score) {
max_score = score;
}
end_time = timestamp_us;
} // end void Event::AddFrame(Image *image, struct timeval timestamp, int score, Image *alarm_image)
end_time = timestamp;
}
bool Event::SetPath(Storage *storage) {
scheme = storage->Scheme();

View File

@ -96,12 +96,10 @@ class Event {
static bool OpenFrameSocket(int);
static bool ValidateFrameSocket(int);
Event(
Monitor *p_monitor,
struct timeval p_start_time,
const std::string &p_cause,
const StringSetMap &p_noteSetMap
);
Event(Monitor *p_monitor,
SystemTimePoint p_start_time,
const std::string &p_cause,
const StringSetMap &p_noteSetMap);
~Event();
uint64_t Id() const { return id; }
@ -109,28 +107,21 @@ class Event {
int Frames() const { return frames; }
int AlarmFrames() const { return alarm_frames; }
timeval StartTime() const { return zm::chrono::duration_cast<timeval>(start_time.time_since_epoch()); }
timeval EndTime() const { return zm::chrono::duration_cast<timeval>(end_time.time_since_epoch()); }
SystemTimePoint StartTime() const { return start_time; }
SystemTimePoint EndTime() const { return end_time; }
void AddPacket(const std::shared_ptr<ZMPacket> &p);
bool WritePacket(const std::shared_ptr<ZMPacket> &p);
bool SendFrameImage(const Image *image, bool alarm_frame=false);
bool WriteFrameImage(
Image *image,
struct timeval timestamp,
const char *event_file,
bool alarm_frame=false
) const;
bool WriteFrameImage(Image *image, SystemTimePoint timestamp, const char *event_file, bool alarm_frame = false) const;
void updateNotes(const StringSetMap &stringSetMap);
void AddFrame(
Image *image,
struct timeval timestamp,
const std::vector<ZoneStats> &stats,
int score=0,
Image *alarm_image=nullptr
);
void AddFrame(Image *image,
SystemTimePoint timestamp,
const std::vector<ZoneStats> &stats,
int score = 0,
Image *alarm_image = nullptr);
private:
void WriteDbFrames();
@ -174,7 +165,7 @@ class Event {
}
static void AddPreAlarmFrame(
Image *image,
struct timeval timestamp,
SystemTimePoint timestamp,
int score=0,
Image *alarm_frame=nullptr
) {

View File

@ -27,16 +27,10 @@
extern "C" {
#include <libavutil/time.h>
#if HAVE_LIBAVUTIL_HWCONTEXT_H
#include <libavutil/hwcontext.h>
#endif
#include <libavutil/pixdesc.h>
}
#include <string>
TimePoint start_read_time;
time_t start_read_time;
#if HAVE_LIBAVUTIL_HWCONTEXT_H
#if LIBAVCODEC_VERSION_CHECK(57, 89, 0, 89, 0)
static enum AVPixelFormat hw_pix_fmt;
@ -169,7 +163,7 @@ FfmpegCamera::~FfmpegCamera() {
}
int FfmpegCamera::PrimeCapture() {
start_read_time = time(nullptr);
start_read_time = std::chrono::steady_clock::now();
if ( mCanCapture ) {
Debug(1, "Priming capture from %s, Closing", mPath.c_str());
Close();
@ -188,7 +182,7 @@ int FfmpegCamera::PreCapture() {
int FfmpegCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
if (!mCanCapture) return -1;
start_read_time = time(nullptr);
start_read_time = std::chrono::steady_clock::now();
int ret;
AVFormatContext *formatContextPtr;
@ -558,11 +552,12 @@ int FfmpegCamera::FfmpegInterruptCallback(void *ctx) {
Debug(1, "Received terminate in cb");
return zm_terminate;
}
time_t now = time(nullptr);
if (now - start_read_time > 10) {
Debug(1, "timeout in ffmpeg camera now %" PRIi64 " - %" PRIi64 " > 10",
static_cast<int64>(now),
static_cast<int64>(start_read_time));
TimePoint now = std::chrono::steady_clock::now();
if (now - start_read_time > Seconds(10)) {
Debug(1, "timeout in ffmpeg camera now %" PRIi64 " - %" PRIi64 " > 10 s",
static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(start_read_time.time_since_epoch()).count()));
return 1;
}
return 0;
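
The read watchdog now keys off `std::chrono::steady_clock`, which is the right clock for elapsed-time checks because it never jumps with wall-clock adjustments. A self-contained sketch of the check the interrupt callback performs; names are illustrative, and the real callback also consults `zm_terminate`.

```cpp
#include <chrono>
#include <cstdio>

using TimePoint = std::chrono::steady_clock::time_point;
using Seconds = std::chrono::seconds;

TimePoint start_read_time;  // stamped before each blocking read

// Return non-zero to tell the demuxer to abort the blocking read.
int InterruptCheck() {
  TimePoint now = std::chrono::steady_clock::now();
  if (now - start_read_time > Seconds(10)) {
    std::printf("read timed out after %lld s\n",
                static_cast<long long>(
                    std::chrono::duration_cast<Seconds>(now - start_read_time).count()));
    return 1;
  }
  return 0;
}
```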

View File

@ -269,4 +269,4 @@ AVFrame *FFmpeg_Input::get_frame(int stream_id, double at) {
}
return get_frame(stream_id);
} // end AVFrame *FFmpeg_Input::get_frame( int stream_id, struct timeval at)
}

View File

@ -21,7 +21,6 @@
#include "zm_packet.h"
#include <sys/stat.h>
#include <unistd.h>
FileCamera::FileCamera(
const Monitor *monitor,
@ -71,8 +70,8 @@ void FileCamera::Terminate() {
}
int FileCamera::PreCapture() {
struct stat statbuf;
if ( stat(path, &statbuf) < 0 ) {
struct stat statbuf = {};
if (stat(path, &statbuf) < 0) {
Error("Can't stat %s: %s", path, strerror(errno));
return -1;
}
@ -81,8 +80,8 @@ int FileCamera::PreCapture() {
// This waits until 1 second has passed since it was modified. Effectively limiting fps to 60.
// Which is kinda bogus. If we were writing to this jpg constantly faster than we are monitoring it here
// we would never break out of this loop
while ( (time(nullptr) - statbuf.st_mtime) < 1 ) {
usleep(100000);
while ((time(nullptr) - statbuf.st_mtime) < 1) {
std::this_thread::sleep_for(Milliseconds(100));
}
return 0;
}
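
Since `usleep()` takes microseconds, the mechanical translations used throughout this PR are worth keeping in mind; a quick reference sketch:

```cpp
#include <chrono>
#include <thread>

// usleep() takes microseconds, so the straight translations are:
//   usleep(100000) -> std::this_thread::sleep_for(std::chrono::milliseconds(100));
//   usleep(10000)  -> std::this_thread::sleep_for(std::chrono::milliseconds(10));
//   usleep(500)    -> std::this_thread::sleep_for(std::chrono::microseconds(500));
int main() {
  std::this_thread::sleep_for(std::chrono::milliseconds(100));
  return 0;
}
```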

View File

@ -3,7 +3,7 @@
Frame::Frame(event_id_t p_event_id,
int p_frame_id,
FrameType p_type,
struct timeval p_timestamp,
SystemTimePoint p_timestamp,
Microseconds p_delta,
int p_score,
std::vector<ZoneStats> p_stats)

View File

@ -23,8 +23,6 @@
#include "zm_event.h"
#include "zm_time.h"
#include "zm_zone.h"
#include <sys/time.h>
#include <vector>
enum FrameType {
@ -41,7 +39,7 @@ class Frame {
Frame(event_id_t p_event_id,
int p_frame_id,
FrameType p_type,
struct timeval p_timestamp,
SystemTimePoint p_timestamp,
Microseconds p_delta,
int p_score,
std::vector<ZoneStats> p_stats
@ -50,7 +48,7 @@ class Frame {
event_id_t event_id;
int frame_id;
FrameType type;
struct timeval timestamp;
SystemTimePoint timestamp;
Microseconds delta;
int score;
std::vector<ZoneStats> zone_stats;

View File

@ -137,8 +137,9 @@ Monitor::MonitorLink::~MonitorLink() {
}
bool Monitor::MonitorLink::connect() {
if ( !last_connect_time || (time(nullptr) - last_connect_time) > 60 ) {
last_connect_time = time(nullptr);
SystemTimePoint now = std::chrono::system_clock::now();
if (!last_connect_time || (now - std::chrono::system_clock::from_time_t(last_connect_time)) > Seconds(60)) {
last_connect_time = std::chrono::system_clock::to_time_t(now);
mem_size = sizeof(SharedData) + sizeof(TriggerData);
@ -472,16 +473,22 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
Debug(1, "Have camera type %s", CameraType_Strings[type].c_str());
col++;
function = (Function)atoi(dbrow[col]); col++;
enabled = dbrow[col] ? atoi(dbrow[col]) : 0; col++;
decoding_enabled = dbrow[col] ? atoi(dbrow[col]) : 0; col++;
enabled = dbrow[col] ? atoi(dbrow[col]) : false; col++;
decoding_enabled = dbrow[col] ? atoi(dbrow[col]) : false; col++;
ReloadLinkedMonitors(dbrow[col]); col++;
/* "AnalysisFPSLimit, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS," */
analysis_fps_limit = dbrow[col] ? strtod(dbrow[col], nullptr) : 0.0; col++;
analysis_update_delay = strtoul(dbrow[col++], nullptr, 0);
capture_delay = (dbrow[col] && atof(dbrow[col]) > 0.0) ? int(Microseconds::period::den / atof(dbrow[col])) : 0; col++;
alarm_capture_delay = (dbrow[col] && atof(dbrow[col]) > 0.0) ? int(Microseconds::period::den / atof(dbrow[col])) : 0; col++;
analysis_update_delay = Seconds(strtoul(dbrow[col++], nullptr, 0));
capture_delay =
(dbrow[col] && atof(dbrow[col]) > 0.0) ? std::chrono::duration_cast<Microseconds>(FPSeconds(1 / atof(dbrow[col])))
: Microseconds(0);
col++;
alarm_capture_delay =
(dbrow[col] && atof(dbrow[col]) > 0.0) ? std::chrono::duration_cast<Microseconds>(FPSeconds(1 / atof(dbrow[col])))
: Microseconds(0);
col++;
/* "Device, Channel, Format, V4LMultiBuffer, V4LCapturesPerFrame, " // V4L Settings */
device = dbrow[col] ? dbrow[col] : ""; col++;
@ -569,8 +576,8 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
else if (alarm_frame_count > MAX_PRE_ALARM_FRAMES) alarm_frame_count = MAX_PRE_ALARM_FRAMES;
/* "SectionLength, MinSectionLength, FrameSkip, MotionFrameSkip, " */
section_length = atoi(dbrow[col]); col++;
min_section_length = atoi(dbrow[col]); col++;
section_length = Seconds(atoi(dbrow[col])); col++;
min_section_length = Seconds(atoi(dbrow[col])); col++;
frame_skip = atoi(dbrow[col]); col++;
motion_frame_skip = atoi(dbrow[col]); col++;
@ -1170,7 +1177,7 @@ int Monitor::GetImage(int32_t index, int scale) {
}
if (!config.timestamp_on_capture) {
TimestampImage(&alarm_image, shared_timestamps[index]);
TimestampImage(&alarm_image, SystemTimePoint(zm::chrono::duration_cast<Microseconds>(shared_timestamps[index])));
}
image = &alarm_image;
} else {
@ -1192,20 +1199,20 @@ ZMPacket *Monitor::getSnapshot(int index) const {
return nullptr;
}
if (index != image_buffer_count) {
return new ZMPacket(image_buffer[index], shared_timestamps[index]);
return new ZMPacket(image_buffer[index],
SystemTimePoint(zm::chrono::duration_cast<Microseconds>(shared_timestamps[index])));
} else {
Error("Unable to generate image, no images in buffer");
}
return nullptr;
}
struct timeval Monitor::GetTimestamp(int index) const {
SystemTimePoint Monitor::GetTimestamp(int index) const {
ZMPacket *packet = getSnapshot(index);
if (packet)
return packet->timestamp;
static struct timeval null_tv = { 0, 0 };
return null_tv;
return {};
}
unsigned int Monitor::GetLastReadIndex() const {
@ -1302,14 +1309,14 @@ void Monitor::actionResume() {
}
int Monitor::actionBrightness(int p_brightness) {
if ( purpose != CAPTURE ) {
if ( p_brightness >= 0 ) {
if (purpose != CAPTURE) {
if (p_brightness >= 0) {
shared_data->brightness = p_brightness;
shared_data->action |= SET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & SET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & SET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to set brightness");
return -1;
@ -1318,9 +1325,9 @@ int Monitor::actionBrightness(int p_brightness) {
} else {
shared_data->action |= GET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & GET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & GET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to get brightness");
return -1;
@ -1333,14 +1340,14 @@ int Monitor::actionBrightness(int p_brightness) {
} // end int Monitor::actionBrightness(int p_brightness)
int Monitor::actionContrast(int p_contrast) {
if ( purpose != CAPTURE ) {
if ( p_contrast >= 0 ) {
if (purpose != CAPTURE) {
if (p_contrast >= 0) {
shared_data->contrast = p_contrast;
shared_data->action |= SET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & SET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & SET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to set contrast");
return -1;
@ -1349,9 +1356,9 @@ int Monitor::actionContrast(int p_contrast) {
} else {
shared_data->action |= GET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & GET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & GET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to get contrast");
return -1;
@ -1364,14 +1371,14 @@ int Monitor::actionContrast(int p_contrast) {
} // end int Monitor::actionContrast(int p_contrast)
int Monitor::actionHue(int p_hue) {
if ( purpose != CAPTURE ) {
if ( p_hue >= 0 ) {
if (purpose != CAPTURE) {
if (p_hue >= 0) {
shared_data->hue = p_hue;
shared_data->action |= SET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & SET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & SET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to set hue");
return -1;
@ -1380,9 +1387,9 @@ int Monitor::actionHue(int p_hue) {
} else {
shared_data->action |= GET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & GET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & GET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to get hue");
return -1;
@ -1395,14 +1402,14 @@ int Monitor::actionHue(int p_hue) {
} // end int Monitor::actionHue(int p_hue)
int Monitor::actionColour(int p_colour) {
if ( purpose != CAPTURE ) {
if ( p_colour >= 0 ) {
if (purpose != CAPTURE) {
if (p_colour >= 0) {
shared_data->colour = p_colour;
shared_data->action |= SET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & SET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & SET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to set colour");
return -1;
@ -1411,9 +1418,9 @@ int Monitor::actionColour(int p_colour) {
} else {
shared_data->action |= GET_SETTINGS;
int wait_loops = 10;
while ( shared_data->action & GET_SETTINGS ) {
if ( wait_loops-- ) {
usleep(100000);
while (shared_data->action & GET_SETTINGS) {
if (wait_loops--) {
std::this_thread::sleep_for(Milliseconds(100));
} else {
Warning("Timed out waiting to get colour");
return -1;
@ -1859,7 +1866,7 @@ bool Monitor::Analyse() {
}
} // end if decoding enabled
struct timeval *timestamp = &snap->timestamp;
SystemTimePoint timestamp = snap->timestamp;
if (Active() and (function == MODECT or function == MOCORD)) {
Debug(3, "signal and active and modect");
@ -1922,23 +1929,18 @@ bool Monitor::Analyse() {
if (event) {
Debug(2, "Have event %" PRIu64 " in record", event->Id());
if (section_length &&
(( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= section_length)
&& (
((function == MOCORD) && (event_close_mode != CLOSE_TIME))
||
( (function == RECORD) && (event_close_mode == CLOSE_TIME) )
|| ! ( timestamp->tv_sec % section_length )
)
) {
Info("%s: %03d - Closing event %" PRIu64 ", section end forced %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %d",
if (section_length != Seconds(0) && (timestamp - GetVideoWriterStartTime() >= section_length)
&& ((function == MOCORD && event_close_mode != CLOSE_TIME)
|| (function == RECORD && event_close_mode == CLOSE_TIME)
|| timestamp.time_since_epoch() % section_length == Seconds(0))) {
Info("%s: %03d - Closing event %" PRIu64 ", section end forced %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %" PRIi64 ,
name.c_str(),
image_count,
event->Id(),
static_cast<int64>(timestamp->tv_sec),
static_cast<int64>(video_store_data->recording.tv_sec),
static_cast<int64>(timestamp->tv_sec - video_store_data->recording.tv_sec),
section_length);
static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(GetVideoWriterStartTime().time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp - GetVideoWriterStartTime()).count()),
static_cast<int64>(Seconds(section_length).count()));
closeEvent();
} // end if section_length
} // end if event
@ -1990,7 +1992,7 @@ bool Monitor::Analyse() {
start_it = nullptr;
} else {
// Create event from current snap
event = new Event(this, *timestamp, "Continuous", noteSetMap);
event = new Event(this, timestamp, "Continuous", noteSetMap);
}
shared_data->last_event_id = event->Id();
@ -2004,7 +2006,8 @@ bool Monitor::Analyse() {
}
alarm_cause = cause+" Continuous "+alarm_cause;
strncpy(shared_data->alarm_cause, alarm_cause.c_str(), sizeof(shared_data->alarm_cause)-1);
video_store_data->recording = event->StartTime();
SetVideoWriterStartTime(event->StartTime());
Info("%s: %03d - Opened new event %" PRIu64 ", section start",
name.c_str(), analysis_image_count, event->Id());
/* To prevent cancelling out an existing alert\prealarm\alarm state */
@ -2019,23 +2022,22 @@ bool Monitor::Analyse() {
if ((state == IDLE) || (state == TAPE) || (state == PREALARM)) {
// If we should end then previous continuous event and start a new non-continuous event
if (event && event->Frames()
&& (!event->AlarmFrames())
&& (event_close_mode == CLOSE_ALARM)
&& ( ( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= min_section_length )
&& ( (!pre_event_count) || (Event::PreAlarmCount() >= alarm_frame_count-1) )
) {
&& !event->AlarmFrames()
&& event_close_mode == CLOSE_ALARM
&& timestamp - GetVideoWriterStartTime() >= min_section_length
&& (!pre_event_count || Event::PreAlarmCount() >= alarm_frame_count - 1)) {
Info("%s: %03d - Closing event %" PRIu64 ", continuous end, alarm begins",
name.c_str(), image_count, event->Id());
closeEvent();
} else if (event) {
// This covers the case where more than one alarm frame is required before going into alarm; basically, check whether we have enough alarm frames
Debug(3,
"pre_alarm_count in event %d, event frames %d, alarm frames %d event length %" PRIi64 " >=? %d min",
"pre_alarm_count in event %d, event frames %d, alarm frames %d event length %" PRIi64 " >=? %" PRIi64 " min",
Event::PreAlarmCount(),
event->Frames(),
event->AlarmFrames(),
static_cast<int64>(timestamp->tv_sec - video_store_data->recording.tv_sec),
min_section_length);
static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp - GetVideoWriterStartTime()).count()),
static_cast<int64>(Seconds(min_section_length).count()));
}
if ((!pre_event_count) || (Event::PreAlarmCount() >= alarm_frame_count-1)) {
// lets construct alarm cause. It will contain cause + names of zones alarmed
@ -2069,7 +2071,7 @@ bool Monitor::Analyse() {
event = new Event(this, starting_packet->timestamp, cause, noteSetMap);
shared_data->last_event_id = event->Id();
snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile());
video_store_data->recording = event->StartTime();
SetVideoWriterStartTime(event->StartTime());
shared_data->state = state = ALARM;
// Write out starting packets, do not modify packetqueue it will garbage collect itself
@ -2132,11 +2134,8 @@ bool Monitor::Analyse() {
Info("%s: %03d - Gone into alert state", name.c_str(), analysis_image_count);
shared_data->state = state = ALERT;
} else if (state == ALERT) {
if (
( analysis_image_count-last_alarm_count > post_event_count )
&&
( ( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= min_section_length )
) {
if (analysis_image_count - last_alarm_count > post_event_count
&& timestamp - GetVideoWriterStartTime() >= min_section_length) {
Info("%s: %03d - Left alarm state (%" PRIu64 ") - %d(%d) images",
name.c_str(), analysis_image_count, event->Id(), event->Frames(), event->AlarmFrames());
//if ( function != MOCORD || event_close_mode == CLOSE_ALARM || event->Cause() == SIGNAL_CAUSE )
@ -2154,14 +2153,14 @@ bool Monitor::Analyse() {
shared_data->state = state = ((function != MOCORD) ? IDLE : TAPE);
} else {
Debug(1,
"State %s because image_count(%d)-last_alarm_count(%d) > post_event_count(%d) and timestamp.tv_sec(%" PRIi64 ") - recording.tv_src(%" PRIi64 ") >= min_section_length(%d)",
"State %s because image_count(%d)-last_alarm_count(%d) > post_event_count(%d) and timestamp.tv_sec(%" PRIi64 ") - recording.tv_src(%" PRIi64 ") >= min_section_length(%" PRIi64 ")",
State_Strings[state].c_str(),
analysis_image_count,
last_alarm_count,
post_event_count,
static_cast<int64>(timestamp->tv_sec),
static_cast<int64>(video_store_data->recording.tv_sec),
min_section_length);
static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(GetVideoWriterStartTime().time_since_epoch()).count()),
static_cast<int64>(Seconds(min_section_length).count()));
}
if (Event::PreAlarmCount())
Event::EmptyPreAlarmFrames();
@ -2185,7 +2184,7 @@ bool Monitor::Analyse() {
// increment pre-alarm image count
//have_pre_alarmed_frames ++;
Event::AddPreAlarmFrame(snap->image, *timestamp, score, nullptr);
Event::AddPreAlarmFrame(snap->image, timestamp, score, nullptr);
} else if (state == ALARM) {
for (const Zone &zone : zones) {
if (zone.Alarmed()) {
@ -2199,20 +2198,19 @@ bool Monitor::Analyse() {
if (event) {
if (noteSetMap.size() > 0)
event->updateNotes(noteSetMap);
if ( section_length
&& ( ( timestamp->tv_sec - video_store_data->recording.tv_sec ) >= section_length )
) {
Warning("%s: %03d - event %" PRIu64 ", has exceeded desired section length. %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %d",
if (section_length != Seconds(0) && (timestamp - GetVideoWriterStartTime() >= section_length)) {
Warning("%s: %03d - event %" PRIu64 ", has exceeded desired section length. %" PRIi64 " - %" PRIi64 " = %" PRIi64 " >= %" PRIi64,
name.c_str(), analysis_image_count, event->Id(),
static_cast<int64>(timestamp->tv_sec), static_cast<int64>(video_store_data->recording.tv_sec),
static_cast<int64>(timestamp->tv_sec - video_store_data->recording.tv_sec),
section_length);
static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(GetVideoWriterStartTime().time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(timestamp - GetVideoWriterStartTime()).count()),
static_cast<int64>(Seconds(section_length).count()));
closeEvent();
event = new Event(this, *timestamp, cause, noteSetMap);
event = new Event(this, timestamp, cause, noteSetMap);
shared_data->last_event_id = event->Id();
//set up video store data
snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile());
video_store_data->recording = event->StartTime();
SetVideoWriterStartTime(event->StartTime());
}
} else {
Error("ALARM but no event");
@ -2269,7 +2267,7 @@ bool Monitor::Analyse() {
UpdateAnalysisFPS();
}
packetqueue.unlock(packet_lock);
shared_data->last_read_time = time(nullptr);
shared_data->last_read_time = std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());
return true;
} // end Monitor::Analyse
@ -2477,8 +2475,8 @@ int Monitor::Capture() {
std::shared_ptr<ZMPacket> packet = std::make_shared<ZMPacket>();
packet->image_index = image_count;
gettimeofday(&(packet->timestamp), nullptr);
shared_data->zmc_heartbeat_time = packet->timestamp.tv_sec;
packet->timestamp = std::chrono::system_clock::now();
shared_data->zmc_heartbeat_time = std::chrono::system_clock::to_time_t(packet->timestamp);
int captureResult = camera->Capture(packet);
Debug(4, "Back from capture result=%d image count %d", captureResult, image_count);
@ -2495,7 +2493,7 @@ int Monitor::Capture() {
shared_data->last_write_index = index;
shared_data->last_write_time = shared_timestamps[index].tv_sec;
image_buffer[index]->Assign(*capture_image);
shared_timestamps[index] = packet->timestamp;
shared_timestamps[index] = zm::chrono::duration_cast<timeval>(packet->timestamp.time_since_epoch());
delete capture_image;
image_count++;
// What about timestamping it?
@ -2506,7 +2504,7 @@ int Monitor::Capture() {
// If we captured, let's assume signal, Decode will detect further
if (!decoding_enabled) {
shared_data->last_write_index = index;
shared_data->last_write_time = packet->timestamp.tv_sec;
shared_data->last_write_time = std::chrono::system_clock::to_time_t(packet->timestamp);
}
Debug(2, "Have packet stream_index:%d ?= videostream_id: %d q.vpktcount %d event? %d image_count %d",
packet->packet.stream_index, video_stream_id, packetqueue.packet_count(video_stream_id), ( event ? 1 : 0 ), image_count);
@ -2702,28 +2700,31 @@ bool Monitor::Decode() {
}
image_buffer[index]->Assign(*(packet->image));
shared_timestamps[index] = packet->timestamp;
shared_timestamps[index] = zm::chrono::duration_cast<timeval>(packet->timestamp.time_since_epoch());
} // end if have image
packet->decoded = true;
shared_data->signal = (capture_image and signal_check_points) ? CheckSignal(capture_image) : true;
shared_data->last_write_index = index;
shared_data->last_write_time = packet->timestamp.tv_sec;
shared_data->last_write_time = std::chrono::system_clock::to_time_t(packet->timestamp);
packetqueue.unlock(packet_lock);
return true;
} // end bool Monitor::Decode()
void Monitor::TimestampImage(Image *ts_image, const timeval &ts_time) const {
void Monitor::TimestampImage(Image *ts_image, SystemTimePoint ts_time) const {
if (!label_format[0])
return;
// Expand the strftime macros first
char label_time_text[256];
tm ts_tm = {};
strftime(label_time_text, sizeof(label_time_text), label_format.c_str(), localtime_r(&ts_time.tv_sec, &ts_tm));
time_t ts_time_t = std::chrono::system_clock::to_time_t(ts_time);
strftime(label_time_text, sizeof(label_time_text), label_format.c_str(), localtime_r(&ts_time_t, &ts_tm));
char label_text[1024];
const char *s_ptr = label_time_text;
char *d_ptr = label_text;
while ( *s_ptr && ((d_ptr-label_text) < (unsigned int)sizeof(label_text)) ) {
while (*s_ptr && ((d_ptr - label_text) < (unsigned int) sizeof(label_text))) {
if ( *s_ptr == config.timestamp_code_char[0] ) {
bool found_macro = false;
switch ( *(s_ptr+1) ) {
@ -2736,7 +2737,10 @@ void Monitor::TimestampImage(Image *ts_image, const timeval &ts_time) const {
found_macro = true;
break;
case 'f' :
d_ptr += snprintf(d_ptr, sizeof(label_text)-(d_ptr-label_text), "%02ld", ts_time.tv_usec/10000);
typedef std::chrono::duration<int64, std::centi> Centiseconds;
Centiseconds centi_sec = std::chrono::duration_cast<Centiseconds>(
ts_time.time_since_epoch() - std::chrono::duration_cast<Seconds>(ts_time.time_since_epoch()));
d_ptr += snprintf(d_ptr, sizeof(label_text) - (d_ptr - label_text), "%02ld", centi_sec.count());
found_macro = true;
break;
}
@ -2956,10 +2960,10 @@ bool Monitor::DumpSettings(char *output, bool verbose) {
sprintf(output+strlen(output), "Post Event Count : %d\n", post_event_count );
sprintf(output+strlen(output), "Stream Replay Buffer : %d\n", stream_replay_buffer );
sprintf(output+strlen(output), "Alarm Frame Count : %d\n", alarm_frame_count );
sprintf(output+strlen(output), "Section Length : %d\n", section_length);
sprintf(output+strlen(output), "Min Section Length : %d\n", min_section_length);
sprintf(output+strlen(output), "Maximum FPS : %.2f\n", capture_delay ? (double) Microseconds::period::den / capture_delay : 0.0);
sprintf(output+strlen(output), "Alarm Maximum FPS : %.2f\n", alarm_capture_delay ? (double) Microseconds::period::den / alarm_capture_delay : 0.0);
sprintf(output+strlen(output), "Section Length : %" PRIi64 "\n", static_cast<int64>(Seconds(section_length).count()));
sprintf(output+strlen(output), "Min Section Length : %" PRIi64 "\n", static_cast<int64>(Seconds(min_section_length).count()));
sprintf(output+strlen(output), "Maximum FPS : %.2f\n", capture_delay != Seconds(0) ? 1 / FPSeconds(capture_delay).count() : 0.0);
sprintf(output+strlen(output), "Alarm Maximum FPS : %.2f\n", alarm_capture_delay != Seconds(0) ? 1 / FPSeconds(alarm_capture_delay).count() : 0.0);
sprintf(output+strlen(output), "Reference Blend %%ge : %d\n", ref_blend_perc);
sprintf(output+strlen(output), "Alarm Reference Blend %%ge : %d\n", alarm_ref_blend_perc);
sprintf(output+strlen(output), "Track Motion : %d\n", track_motion);
@ -3113,7 +3117,6 @@ void Monitor::get_ref_image() {
// can't analyse it anyway, increment
packetqueue.increment_it(analysis_it);
}
//usleep(10000);
}
if (zm_terminate)
return;
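
The MaxFPS-style settings are now stored as chrono delays rather than raw microsecond ints; the round trip between a frame rate and an inter-frame delay is just a `duration_cast` through floating-point seconds. A hedged sketch, with helper names invented for illustration.

```cpp
#include <chrono>
#include <cstdio>

using Microseconds = std::chrono::microseconds;
using FPSeconds = std::chrono::duration<double>;

// Configured frame rate -> inter-frame delay.
Microseconds DelayFromFps(double max_fps) {
  return max_fps > 0.0 ? std::chrono::duration_cast<Microseconds>(FPSeconds(1.0 / max_fps))
                       : Microseconds(0);
}

// Inter-frame delay -> FPS figure for display (as in DumpSettings).
double FpsFromDelay(Microseconds delay) {
  return delay != Microseconds(0) ? 1.0 / FPSeconds(delay).count() : 0.0;
}

int main() {
  Microseconds capture_delay = DelayFromFps(2.5);  // 400000 us
  std::printf("delay %lld us -> %.2f fps\n",
              static_cast<long long>(capture_delay.count()),
              FpsFromDelay(capture_delay));
  return 0;
}
```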

View File

@ -313,15 +313,15 @@ protected:
int pre_event_count; // How many images to hold and prepend to an alarm event
int post_event_count; // How many unalarmed images must occur before the alarm state is reset
int stream_replay_buffer; // How many frames to store to support DVR functions, IGNORED from this object, passed directly into zms now
int section_length; // How long events should last in continuous modes
int min_section_length; // Minimum event length when using event_close_mode == ALARM
Seconds section_length; // How long events should last in continuous modes
Seconds min_section_length; // Minimum event length when using event_close_mode == ALARM
bool adaptive_skip; // Whether to use the newer adaptive algorithm for this monitor
int frame_skip; // How many frames to skip in continuous modes
int motion_frame_skip; // How many frames to skip in motion detection
double analysis_fps_limit; // Target framerate for video analysis
unsigned int analysis_update_delay; // How long we wait before updating analysis parameters
int capture_delay; // How long we wait between capture frames
int alarm_capture_delay; // How long we wait between capture frames when in alarm state
Microseconds analysis_update_delay; // How long we wait before updating analysis parameters
Microseconds capture_delay; // How long we wait between capture frames
Microseconds alarm_capture_delay; // How long we wait between capture frames when in alarm state
int alarm_frame_count; // How many alarm frames are required before an event is triggered
int alert_to_alarm_frame_count; // How many alarm frames (consecutive score frames) are required to return alarm from alert
// value for now is the same number configured in alarm_frame_count, maybe getting his own parameter some day
@ -424,7 +424,6 @@ protected:
public:
explicit Monitor();
explicit Monitor(unsigned int p_id);
~Monitor();
@ -453,7 +452,7 @@ public:
inline unsigned int Id() const { return id; }
inline const char *Name() const { return name.c_str(); }
inline unsigned int ServerId() { return server_id; }
inline unsigned int ServerId() const { return server_id; }
inline Storage *getStorage() {
if ( ! storage ) {
storage = new Storage(storage_id);
@ -486,7 +485,7 @@ public:
}
inline bool Exif() const { return embed_exif; }
inline bool RTSPServer() const { return rtsp_server; }
inline bool RecordAudio() { return record_audio; }
inline bool RecordAudio() const { return record_audio; }
/*
inline Purpose Purpose() { return purpose };
@ -513,8 +512,12 @@ public:
uint64_t GetVideoWriterEventId() const { return video_store_data->current_event; }
void SetVideoWriterEventId( uint64_t p_event_id ) { video_store_data->current_event = p_event_id; }
struct timeval GetVideoWriterStartTime() const { return video_store_data->recording; }
void SetVideoWriterStartTime(const struct timeval &t) { video_store_data->recording = t; }
SystemTimePoint GetVideoWriterStartTime() const {
return SystemTimePoint(zm::chrono::duration_cast<Microseconds>(video_store_data->recording));
}
void SetVideoWriterStartTime(SystemTimePoint t) {
video_store_data->recording = zm::chrono::duration_cast<timeval>(t.time_since_epoch());
}
unsigned int GetPreEventCount() const { return pre_event_count; };
int32_t GetImageBufferCount() const { return image_buffer_count; };
@ -525,20 +528,20 @@ public:
AVStream *GetVideoStream() const { return camera ? camera->getVideoStream() : nullptr; };
AVCodecContext *GetVideoCodecContext() const { return camera ? camera->getVideoCodecContext() : nullptr; };
const std::string GetSecondPath() const { return second_path; };
const std::string GetVideoFifoPath() const { return shared_data ? shared_data->video_fifo_path : ""; };
const std::string GetAudioFifoPath() const { return shared_data ? shared_data->audio_fifo_path : ""; };
const std::string GetRTSPStreamName() const { return rtsp_streamname; };
std::string GetSecondPath() const { return second_path; };
std::string GetVideoFifoPath() const { return shared_data ? shared_data->video_fifo_path : ""; };
std::string GetAudioFifoPath() const { return shared_data ? shared_data->audio_fifo_path : ""; };
std::string GetRTSPStreamName() const { return rtsp_streamname; };
int GetImage(int32_t index=-1, int scale=100);
ZMPacket *getSnapshot( int index=-1 ) const;
struct timeval GetTimestamp( int index=-1 ) const;
SystemTimePoint GetTimestamp(int index = -1) const;
void UpdateAdaptiveSkip();
useconds_t GetAnalysisRate();
unsigned int GetAnalysisUpdateDelay() const { return analysis_update_delay; }
Microseconds GetAnalysisUpdateDelay() const { return analysis_update_delay; }
unsigned int GetCaptureMaxFPS() const { return capture_max_fps; }
int GetCaptureDelay() const { return capture_delay; }
int GetAlarmCaptureDelay() const { return alarm_capture_delay; }
Microseconds GetCaptureDelay() const { return capture_delay; }
Microseconds GetAlarmCaptureDelay() const { return alarm_capture_delay; }
unsigned int GetLastReadIndex() const;
unsigned int GetLastWriteIndex() const;
uint64_t GetLastEventId() const;
@ -549,9 +552,11 @@ public:
void ForceAlarmOff();
void CancelForced();
TriggerState GetTriggerState() const { return trigger_data ? trigger_data->trigger_state : TRIGGER_CANCEL; }
inline time_t getStartupTime() const { return shared_data->startup_time; }
inline void setStartupTime( time_t p_time ) { shared_data->startup_time = p_time; }
inline void setHeartbeatTime( time_t p_time ) { shared_data->zmc_heartbeat_time = p_time; }
SystemTimePoint GetStartupTime() const { return std::chrono::system_clock::from_time_t(shared_data->startup_time); }
void SetStartupTime(SystemTimePoint time) { shared_data->startup_time = std::chrono::system_clock::to_time_t(time); }
void SetHeartbeatTime(SystemTimePoint time) {
shared_data->zmc_heartbeat_time = std::chrono::system_clock::to_time_t(time);
}
void get_ref_image();
int LabelSize() const { return label_size; }
@ -582,7 +587,7 @@ public:
bool Analyse();
bool Decode();
void DumpImage( Image *dump_image ) const;
void TimestampImage(Image *ts_image, const timeval &ts_time) const;
void TimestampImage(Image *ts_image, SystemTimePoint ts_time) const;
void closeEvent();
void Reload();
@ -614,7 +619,7 @@ public:
double get_analysis_fps( ) const {
return shared_data ? shared_data->analysis_fps : 0.0;
}
int Importance() { return importance; }
int Importance() const { return importance; }
};
#define MOD_ADD( var, delta, limit ) (((var)+(limit)+(delta))%(limit))
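
Because the shared-memory structs are a fixed ABI, their `time_t`/`timeval` fields stay as-is; the chrono types only appear in the accessors, converted with `system_clock::to_time_t`/`from_time_t` at the boundary. A minimal sketch of that pattern; the struct and class names are stand-ins for `SharedData`/`Monitor`.

```cpp
#include <chrono>
#include <ctime>

using SystemTimePoint = std::chrono::time_point<std::chrono::system_clock>;

// The mapped struct keeps a plain time_t (its layout cannot change).
struct SharedClockData {
  time_t startup_time;
};

// The C++ interface deals only in SystemTimePoint.
class MonitorLike {
 public:
  explicit MonitorLike(SharedClockData *shared) : shared_(shared) {}

  SystemTimePoint GetStartupTime() const {
    return std::chrono::system_clock::from_time_t(shared_->startup_time);
  }
  void SetStartupTime(SystemTimePoint t) {
    shared_->startup_time = std::chrono::system_clock::to_time_t(t);
  }

 private:
  SharedClockData *shared_;
};
```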

View File

@ -377,12 +377,12 @@ bool MonitorStream::sendFrame(const char *filepath, SystemTimePoint timestamp) {
return true;
}
return false;
} // end bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp)
}
bool MonitorStream::sendFrame(Image *image, SystemTimePoint timestamp) {
Image *send_image = prepareImage(image);
if (!config.timestamp_on_capture) {
monitor->TimestampImage(send_image, zm::chrono::duration_cast<timeval>(timestamp.time_since_epoch()));
monitor->TimestampImage(send_image, timestamp);
}
fputs("--" BOUNDARY "\r\n", stdout);
@ -461,7 +461,7 @@ bool MonitorStream::sendFrame(Image *image, SystemTimePoint timestamp) {
} // Not mpeg
last_frame_sent = now;
return true;
} // end bool MonitorStream::sendFrame( Image *image, const timeval &timestamp )
}
void MonitorStream::runStream() {
if (type == STREAM_SINGLE) {
@ -854,7 +854,7 @@ void MonitorStream::SingleImage(int scale) {
Image scaled_image;
while ((monitor->shared_data->last_write_index >= monitor->image_buffer_count) and !zm_terminate) {
Debug(1, "Waiting for capture to begin");
usleep(100000);
std::this_thread::sleep_for(Milliseconds(100));
}
int index = monitor->shared_data->last_write_index % monitor->image_buffer_count;
Debug(1, "write index: %d %d", monitor->shared_data->last_write_index, index);
@ -865,8 +865,9 @@ void MonitorStream::SingleImage(int scale) {
scaled_image.Scale(scale);
snap_image = &scaled_image;
}
if ( !config.timestamp_on_capture ) {
monitor->TimestampImage(snap_image, monitor->shared_timestamps[index]);
if (!config.timestamp_on_capture) {
monitor->TimestampImage(snap_image,
SystemTimePoint(zm::chrono::duration_cast<Microseconds>(monitor->shared_timestamps[index])));
}
snap_image->EncodeJpeg(img_buffer, &img_buffer_size);

View File

@ -21,7 +21,6 @@
#define ZM_MONITORSTREAM_H
#include "zm_stream.h"
#include <sys/time.h>
class MonitorStream : public StreamBase {
protected:

View File

@ -21,13 +21,7 @@
#include "zm_logger.h"
#include "zm_rgb.h"
#include <cstring>
#include <unistd.h>
extern "C" {
#include <libavutil/mathematics.h>
#include <libavcodec/avcodec.h>
}
#include "zm_time.h"
bool VideoStream::initialised = false;
@ -537,32 +531,29 @@ int VideoStream::SendPacket(AVPacket *packet) {
return ret;
}
void *VideoStream::StreamingThreadCallback(void *ctx){
Debug( 1, "StreamingThreadCallback started" );
if (ctx == nullptr) return nullptr;
void *VideoStream::StreamingThreadCallback(void *ctx) {
Debug(1, "StreamingThreadCallback started");
VideoStream* videoStream = reinterpret_cast<VideoStream*>(ctx);
if (ctx == nullptr) {
return nullptr;
}
const uint64_t nanosecond_multiplier = 1000000000;
VideoStream *videoStream = reinterpret_cast<VideoStream *>(ctx);
uint64_t target_interval_ns = nanosecond_multiplier * ( ((double)videoStream->codec_context->time_base.num) / (videoStream->codec_context->time_base.den) );
TimePoint::duration target_interval = std::chrono::duration_cast<TimePoint::duration>(FPSeconds(
videoStream->codec_context->time_base.num / static_cast<double>(videoStream->codec_context->time_base.den)));
uint64_t frame_count = 0;
timespec start_time;
clock_gettime(CLOCK_MONOTONIC, &start_time);
uint64_t start_time_ns = (start_time.tv_sec*nanosecond_multiplier) + start_time.tv_nsec;
while(videoStream->do_streaming) {
timespec current_time;
clock_gettime(CLOCK_MONOTONIC, &current_time);
uint64_t current_time_ns = (current_time.tv_sec*nanosecond_multiplier) + current_time.tv_nsec;
uint64_t target_ns = start_time_ns + (target_interval_ns * frame_count);
if ( current_time_ns < target_ns ) {
// It's not time to render a frame yet.
usleep( (target_ns - current_time_ns) * 0.001 );
}
uint64_t frame_count = 0;
TimePoint start_time = std::chrono::steady_clock::now();
while (videoStream->do_streaming) {
TimePoint current_time = std::chrono::steady_clock::now();
TimePoint target = start_time + (target_interval * frame_count);
if (current_time < target) {
// It's not time to render a frame yet.
std::this_thread::sleep_for(target - current_time);
}
// By sending the last rendered frame we deliver frames to the client more accurately.
// If we're encoding the frame before sending it there will be lag.
@ -573,27 +564,29 @@ void *VideoStream::StreamingThreadCallback(void *ctx){
if (packet->size) {
videoStream->SendPacket(packet);
}
av_packet_unref( packet);
av_packet_unref(packet);
videoStream->packet_index = videoStream->packet_index ? 0 : 1;
// Lock buffer and render next frame.
if ( pthread_mutex_lock( videoStream->buffer_copy_lock ) != 0 ) {
Fatal( "StreamingThreadCallback: pthread_mutex_lock failed." );
}
if ( videoStream->buffer_copy ) {
// Encode next frame.
videoStream->ActuallyEncodeFrame( videoStream->buffer_copy, videoStream->buffer_copy_used, videoStream->add_timestamp, videoStream->timestamp );
}
if ( pthread_mutex_unlock( videoStream->buffer_copy_lock ) != 0 ) {
Fatal( "StreamingThreadCallback: pthread_mutex_unlock failed." );
}
frame_count++;
}
return nullptr;
if (pthread_mutex_lock(videoStream->buffer_copy_lock) != 0) {
Fatal("StreamingThreadCallback: pthread_mutex_lock failed.");
}
if (videoStream->buffer_copy) {
// Encode next frame.
videoStream->ActuallyEncodeFrame(videoStream->buffer_copy,
videoStream->buffer_copy_used,
videoStream->add_timestamp,
videoStream->timestamp);
}
if (pthread_mutex_unlock(videoStream->buffer_copy_lock) != 0) {
Fatal("StreamingThreadCallback: pthread_mutex_unlock failed.");
}
frame_count++;
}
return nullptr;
}
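
The streaming thread's frame pacing is now expressed directly in clock arithmetic: the target instant for frame N is `start + N * interval`, and the thread simply sleeps until then. A standalone sketch of that loop shape, with encoding and sending elided.

```cpp
#include <chrono>
#include <thread>

using TimePoint = std::chrono::steady_clock::time_point;
using FPSeconds = std::chrono::duration<double>;

void PaceFrames(double fps, int frames_to_send) {
  TimePoint::duration interval =
      std::chrono::duration_cast<TimePoint::duration>(FPSeconds(1.0 / fps));
  TimePoint start = std::chrono::steady_clock::now();

  for (int frame = 0; frame < frames_to_send; ++frame) {
    TimePoint target = start + interval * frame;
    TimePoint now = std::chrono::steady_clock::now();
    if (now < target)
      std::this_thread::sleep_for(target - now);  // not time to render yet
    // ... encode and send the frame here ...
  }
}
```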

View File

@ -21,7 +21,6 @@
#include "zm_ffmpeg.h"
#include "zm_image.h"
#include "zm_logger.h"
#include <sys/time.h>
using namespace std;
AVPixelFormat target_format = AV_PIX_FMT_NONE;
@ -31,7 +30,6 @@ ZMPacket::ZMPacket() :
stream(nullptr),
in_frame(nullptr),
out_frame(nullptr),
timestamp({}),
buffer(nullptr),
image(nullptr),
analysis_image(nullptr),
@ -40,13 +38,13 @@ ZMPacket::ZMPacket() :
image_index(-1),
codec_imgsize(0),
pts(0),
decoded(0)
decoded(false)
{
av_init_packet(&packet);
packet.size = 0; // So we can detect whether it has been filled.
}
ZMPacket::ZMPacket(Image *i, const timeval &tv) :
ZMPacket::ZMPacket(Image *i, SystemTimePoint tv) :
keyframe(0),
stream(nullptr),
in_frame(nullptr),
@ -60,7 +58,7 @@ ZMPacket::ZMPacket(Image *i, const timeval &tv) :
image_index(-1),
codec_imgsize(0),
pts(0),
decoded(0)
decoded(false)
{
av_init_packet(&packet);
packet.size = 0; // So we can detect whether it has been filled.
@ -80,7 +78,7 @@ ZMPacket::ZMPacket(ZMPacket &p) :
image_index(-1),
codec_imgsize(0),
pts(0),
decoded(0)
decoded(false)
{
av_init_packet(&packet);
packet.size = 0;
@ -95,8 +93,8 @@ ZMPacket::~ZMPacket() {
if (in_frame) av_frame_free(&in_frame);
if (out_frame) av_frame_free(&out_frame);
if (buffer) av_freep(&buffer);
if (analysis_image) delete analysis_image;
if (image) delete image;
delete analysis_image;
delete image;
}
/* returns < 0 on error, 0 on not ready, int bytes consumed on success
@ -243,8 +241,8 @@ AVPacket *ZMPacket::set_packet(AVPacket *p) {
if (zm_av_packet_ref(&packet, p) < 0) {
Error("error refing packet");
}
//ZM_DUMP_PACKET(packet, "zmpacket:");
gettimeofday(&timestamp, nullptr);
timestamp = std::chrono::system_clock::now();
keyframe = p->flags & AV_PKT_FLAG_KEY;
return &packet;
}

View File

@ -21,6 +21,7 @@
#define ZM_PACKET_H
#include "zm_logger.h"
#include "zm_time.h"
#include "zm_zone.h"
#include <condition_variable>
@ -31,10 +32,6 @@ extern "C" {
#include <libavformat/avformat.h>
}
#ifdef __FreeBSD__
#include <sys/time.h>
#endif // __FreeBSD__
class Image;
class ZMPacket {
@ -49,7 +46,7 @@ class ZMPacket {
AVPacket packet; // Input packet, undecoded
AVFrame *in_frame; // Input image, decoded Theoretically only filled if needed.
AVFrame *out_frame; // output image, Only filled if needed.
timeval timestamp;
SystemTimePoint timestamp;
uint8_t *buffer; // buffer used in image
Image *image;
Image *analysis_image;
@ -70,7 +67,7 @@ class ZMPacket {
int is_keyframe() { return keyframe; };
int decode( AVCodecContext *ctx );
explicit ZMPacket(Image *image, const timeval &tv);
explicit ZMPacket(Image *image, SystemTimePoint tv);
explicit ZMPacket(ZMPacket &packet);
ZMPacket();
~ZMPacket();

View File

@ -24,7 +24,6 @@
#include "zm_ffmpeg.h"
#include "zm_packet.h"
#include "zm_signal.h"
#include <sys/time.h>
PacketQueue::PacketQueue():
video_stream_id(-1),

View File

@ -300,15 +300,17 @@ int RemoteCameraHttp::ReadData(Buffer &buffer, unsigned int bytes_expected) {
} // end readData
int RemoteCameraHttp::GetData() {
time_t start_time = time(nullptr);
int buffer_len = 0;
while (!(buffer_len = ReadData(buffer))) {
if (zm_terminate or ( (time(nullptr) - start_time) > ZM_WATCH_MAX_DELAY ))
return -1;
Debug(4, "Timeout waiting for REGEXP HEADER");
usleep(100000);
}
return buffer_len;
TimePoint start_time = std::chrono::steady_clock::now();
int buffer_len;
while (!(buffer_len = ReadData(buffer))) {
if (zm_terminate or std::chrono::steady_clock::now() - start_time > FPSeconds(config.watch_max_delay)) {
return -1;
}
Debug(4, "Timeout waiting for REGEXP HEADER");
std::this_thread::sleep_for(Milliseconds(100));
}
return buffer_len;
}
int RemoteCameraHttp::GetResponse() {
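
`GetData()` is the same poll-until-deadline shape as the ffmpeg watchdog, except the limit comes from configuration and may be fractional, hence the comparison against `FPSeconds`. A reduced sketch; the `ReadOnce` function pointer stands in for `ReadData(buffer)`.

```cpp
#include <chrono>
#include <thread>

using TimePoint = std::chrono::steady_clock::time_point;
using Milliseconds = std::chrono::milliseconds;
using FPSeconds = std::chrono::duration<double>;

// Poll a non-blocking read, giving up once the (possibly fractional) deadline passes.
int PollWithDeadline(int (*ReadOnce)(), double max_delay_seconds) {
  TimePoint start = std::chrono::steady_clock::now();
  int len;
  while (!(len = ReadOnce())) {
    if (std::chrono::steady_clock::now() - start > FPSeconds(max_delay_seconds))
      return -1;  // timed out
    std::this_thread::sleep_for(Milliseconds(100));
  }
  return len;
}
```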

View File

@ -126,10 +126,11 @@ int RemoteCameraRtsp::Disconnect() {
int RemoteCameraRtsp::PrimeCapture() {
Debug(2, "Waiting for sources");
for ( int i = 0; (i < 100) && !rtspThread->hasSources(); i++ ) {
usleep(100000);
for (int i = 0; i < 100 && !rtspThread->hasSources(); i++) {
std::this_thread::sleep_for(Milliseconds(100));
}
if ( !rtspThread->hasSources() ) {
if (!rtspThread->hasSources()) {
Error("No RTSP sources");
return -1;
}

View File

@ -269,16 +269,16 @@ void RtpCtrlThread::Run() {
// The only reason I can think of why we would have a timeout period is so that we can regularly send RR packets.
// Why 10 seconds? If anything I think this should be whatever timeout value was given in the DESCRIBE response
zm::Select select(10 );
zm::Select select(Seconds(10));
select.addReader( &rtpCtrlServer );
unsigned char buffer[ZM_NETWORK_BUFSIZ];
time_t last_receive = time(nullptr);
bool timeout = false; // used as a flag that we had a timeout, and then sent an RR to see if we wake back up. Real timeout will happen when this is true.
TimePoint last_receive = std::chrono::steady_clock::now();
bool timeout = false; // used as a flag that we had a timeout, and then sent an RR to see if we wake back up. Real timeout will happen when this is true.
while (!mTerminate && select.wait() >= 0) {
time_t now = time(nullptr);
TimePoint now = std::chrono::steady_clock::now();
zm::Select::CommsList readable = select.getReadable();
if ( readable.size() == 0 ) {
if ( ! timeout ) {
@ -287,20 +287,20 @@ void RtpCtrlThread::Run() {
unsigned char *bufferPtr = buffer;
bufferPtr += generateRr( bufferPtr, sizeof(buffer)-(bufferPtr-buffer) );
bufferPtr += generateSdes( bufferPtr, sizeof(buffer)-(bufferPtr-buffer) );
Debug(3, "Preventing timeout by sending %zd bytes on sd %d. Time since last receive: %" PRIi64,
bufferPtr - buffer, rtpCtrlServer.getWriteDesc(), static_cast<int64>(now - last_receive));
Debug(3, "Preventing timeout by sending %zd bytes on sd %d. Time since last receive: %.2f s",
bufferPtr - buffer, rtpCtrlServer.getWriteDesc(), FPSeconds(now - last_receive).count());
if ( (nBytes = rtpCtrlServer.send(buffer, bufferPtr-buffer)) < 0 )
Error("Unable to send: %s", strerror(errno));
timeout = true;
continue;
} else {
Debug(1, "RTCP timed out. Time since last receive: %" PRIi64, static_cast<int64>(now - last_receive));
Debug(1, "RTCP timed out. Time since last receive: %.2f s", FPSeconds(now - last_receive).count());
continue;
//break;
}
} else {
timeout = false;
last_receive = time(nullptr);
last_receive = std::chrono::steady_clock::now();
}
for (zm::Select::CommsList::iterator iter = readable.begin(); iter != readable.end(); ++iter ) {
if ( zm::UdpInetSocket *socket = dynamic_cast<zm::UdpInetSocket *>(*iter) ) {

View File

@ -76,7 +76,7 @@ void RtpDataThread::Run() {
}
Debug(3, "Bound to %s:%d", mRtpSource.getLocalHost().c_str(), mRtpSource.getLocalDataPort());
zm::Select select(3);
zm::Select select(Seconds(3));
select.addReader(&rtpDataSocket);
unsigned char buffer[ZM_NETWORK_BUFSIZ];

View File

@ -66,7 +66,7 @@ RtpSource::RtpSource(
mRtpFactor = mRtpClock;
mBaseTimeReal = tvNow();
mBaseTimeReal = std::chrono::system_clock::now();
mBaseTimeNtp = {};
mBaseTimeRtp = rtpTime;
@ -159,12 +159,9 @@ bool RtpSource::updateSeq(uint16_t seq) {
}
void RtpSource::updateJitter( const RtpDataHeader *header ) {
if ( mRtpFactor > 0 ) {
timeval now = {};
gettimeofday(&now, nullptr);
FPSeconds time_diff =
zm::chrono::duration_cast<Microseconds>(now) - zm::chrono::duration_cast<Microseconds>(mBaseTimeReal);
if (mRtpFactor > 0) {
SystemTimePoint now = std::chrono::system_clock::now();
FPSeconds time_diff = std::chrono::duration_cast<FPSeconds>(now - mBaseTimeReal);
uint32_t localTimeRtp = mBaseTimeRtp + static_cast<uint32>(time_diff.count() * mRtpFactor);
uint32_t packetTransit = localTimeRtp - ntohl(header->timestampN);
@ -202,7 +199,7 @@ void RtpSource::updateRtcpData(
Debug(5, "ntpTime: %ld.%06ld, rtpTime: %x", ntpTime.tv_sec, ntpTime.tv_usec, rtpTime);
if ( mBaseTimeNtp.tv_sec == 0 ) {
mBaseTimeReal = tvNow();
mBaseTimeReal = std::chrono::system_clock::now();
mBaseTimeNtp = ntpTime;
mBaseTimeRtp = rtpTime;
} else if ( !mRtpClock ) {

View File

@ -24,6 +24,7 @@
#include "zm_config.h"
#include "zm_define.h"
#include "zm_ffmpeg.h"
#include "zm_time.h"
#include <condition_variable>
#include <mutex>
#include <string>
@ -68,7 +69,7 @@ private:
// Time keys
uint32_t mRtpClock;
uint32_t mRtpFactor;
struct timeval mBaseTimeReal;
SystemTimePoint mBaseTimeReal;
struct timeval mBaseTimeNtp;
uint32_t mBaseTimeRtp;

View File

@ -331,7 +331,8 @@ void RtspThread::Run() {
authTried = true;
sendCommand(message);
// FIXME Why sleep 1?
usleep(10000);
std::this_thread::sleep_for(Milliseconds(10));
res = recvResponse(response);
if ( !res && respCode==401 )
mNeedAuth = true;
@ -438,15 +439,18 @@ void RtspThread::Run() {
lines = Split(response, "\r\n");
std::string session;
int timeout = 0;
Seconds timeout = Seconds(0);
char transport[256] = "";
for ( size_t i = 0; i < lines.size(); i++ ) {
if ( ( lines[i].size() > 8 ) && ( lines[i].substr(0, 8) == "Session:" ) ) {
StringVector sessionLine = Split(lines[i].substr(9), ";");
session = TrimSpaces(sessionLine[0]);
if ( sessionLine.size() == 2 )
sscanf(TrimSpaces(sessionLine[1]).c_str(), "timeout=%d", &timeout);
if ( sessionLine.size() == 2 ){
int32 timeout_val = 0;
sscanf(TrimSpaces(sessionLine[1]).c_str(), "timeout=%d", &timeout_val);
timeout = Seconds(timeout_val);
}
}
sscanf(lines[i].c_str(), "Transport: %s", transport);
}
@ -454,7 +458,7 @@ void RtspThread::Run() {
if ( session.empty() )
Fatal("Unable to get session identifier from response '%s'", response.c_str());
Debug(2, "Got RTSP session %s, timeout %d secs", session.c_str(), timeout);
Debug(2, "Got RTSP session %s, timeout %" PRIi64 " secs", session.c_str(), Seconds(timeout).count());
if ( !transport[0] )
Fatal("Unable to get transport details from response '%s'", response.c_str());
@ -517,12 +521,17 @@ void RtspThread::Run() {
if ( ( lines[i].size() > 9 ) && ( lines[i].substr(0, 9) == "RTP-Info:" ) )
rtpInfo = TrimSpaces(lines[i].substr(9));
// Check for a timeout again. Some rtsp devices don't send a timeout until after the PLAY command is sent
if ( ( lines[i].size() > 8 ) && ( lines[i].substr(0, 8) == "Session:" ) && ( timeout == 0 ) ) {
if ((lines[i].size() > 8) && (lines[i].substr(0, 8) == "Session:") && (timeout == Seconds(0))) {
StringVector sessionLine = Split(lines[i].substr(9), ";");
if ( sessionLine.size() == 2 )
sscanf(TrimSpaces(sessionLine[1]).c_str(), "timeout=%d", &timeout);
if ( timeout > 0 )
Debug(2, "Got timeout %d secs from PLAY command response", timeout);
if ( sessionLine.size() == 2 ){
int32 timeout_val = 0;
sscanf(TrimSpaces(sessionLine[1]).c_str(), "timeout=%d", &timeout_val);
timeout = Seconds(timeout_val);
}
if ( timeout > Seconds(0) ) {
Debug(2, "Got timeout %" PRIi64 " secs from PLAY command response", Seconds(timeout).count());
}
}
}
@ -557,8 +566,8 @@ void RtspThread::Run() {
Debug( 2, "RTSP Seq is %d", seq );
Debug( 2, "RTSP Rtptime is %ld", rtpTime );
time_t lastKeepalive = time(nullptr);
time_t now;
TimePoint lastKeepalive = std::chrono::steady_clock::now();
TimePoint now;
message = "GET_PARAMETER "+mUrl+" RTSP/1.0\r\nSession: "+session+"\r\n";
switch( mMethod ) {
@ -570,20 +579,21 @@ void RtspThread::Run() {
RtpCtrlThread rtpCtrlThread( *this, *source );
while (!mTerminate) {
now = time(nullptr);
now = std::chrono::steady_clock::now();
// Send a keepalive message if the server supports this feature and we are close to the timeout expiration
Debug(5, "sendkeepalive %d, timeout %d, now: %" PRIi64 " last: %" PRIi64 " since: %" PRIi64,
Debug(5, "sendkeepalive %d, timeout %" PRIi64 " s, now: %" PRIi64 " s last: %" PRIi64 " s since: %" PRIi64 " s",
sendKeepalive,
timeout,
static_cast<int64>(now),
static_cast<int64>(lastKeepalive),
static_cast<int64>(now - lastKeepalive));
if ( sendKeepalive && (timeout > 0) && ((now-lastKeepalive) > (timeout-5)) ) {
if ( !sendCommand( message ) )
static_cast<int64>(Seconds(timeout).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(lastKeepalive.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>((now - lastKeepalive)).count()));
if (sendKeepalive && (timeout > Seconds(0)) && ((now - lastKeepalive) > (timeout - Seconds(5)))) {
if (!sendCommand(message))
return;
lastKeepalive = now;
}
usleep( 100000 );
std::this_thread::sleep_for(Microseconds(100000));
}
#if 0
message = "PAUSE "+mUrl+" RTSP/1.0\r\nSession: "+session+"\r\n";
@ -621,7 +631,7 @@ void RtspThread::Run() {
RtpDataThread rtpDataThread( *this, *source );
RtpCtrlThread rtpCtrlThread( *this, *source );
zm::Select select(double(config.http_timeout)/1000.0 );
zm::Select select(Milliseconds(config.http_timeout));
select.addReader( &mRtspSocket );
Buffer buffer( ZM_NETWORK_BUFSIZ );
@ -694,21 +704,23 @@ void RtspThread::Run() {
}
// Send a keepalive message if the server supports this feature and we are close to the timeout expiration
// FIXME: Is this really necessary when using tcp ?
now = time(nullptr);
now = std::chrono::steady_clock::now();
// Send a keepalive message if the server supports this feature and we are close to the timeout expiration
Debug(5, "sendkeepalive %d, timeout %d, now: %" PRIi64 " last: %" PRIi64 " since: %" PRIi64,
Debug(5, "sendkeepalive %d, timeout %" PRIi64 " s, now: %" PRIi64 " s last: %" PRIi64 " s since: %" PRIi64 " s",
sendKeepalive,
timeout,
static_cast<int64>(now),
static_cast<int64>(lastKeepalive),
static_cast<int64>(now - lastKeepalive));
if ( sendKeepalive && (timeout > 0) && ((now-lastKeepalive) > (timeout-5)) )
{
if ( !sendCommand( message ) )
static_cast<int64>(Seconds(timeout).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(lastKeepalive.time_since_epoch()).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>((now - lastKeepalive)).count()));
if (sendKeepalive && (timeout > Seconds(0)) && ((now - lastKeepalive) > (timeout - Seconds(5)))) {
if (!sendCommand(message)) {
return;
}
lastKeepalive = now;
}
buffer.tidy( 1 );
buffer.tidy(true);
}
#if 0
message = "PAUSE "+mUrl+" RTSP/1.0\r\nSession: "+session+"\r\n";
@ -737,12 +749,14 @@ void RtspThread::Run() {
while (!mTerminate) {
// Send a keepalive message if the server supports this feature and we are close to the timeout expiration
if ( sendKeepalive && (timeout > 0) && ((time(nullptr)-lastKeepalive) > (timeout-5)) ) {
if ( !sendCommand( message ) )
if (sendKeepalive && (timeout > Seconds(0))
&& ((std::chrono::steady_clock::now() - lastKeepalive) > (timeout - Seconds(5)))) {
if (!sendCommand(message)) {
return;
lastKeepalive = time(nullptr);
}
lastKeepalive = std::chrono::steady_clock::now();
}
usleep(100000);
std::this_thread::sleep_for(Microseconds(100000));
}
#if 0
message = "PAUSE "+mUrl+" RTSP/1.0\r\nSession: "+session+"\r\n";

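All three transport loops above share the same keepalive rule: refresh the session five seconds before the advertised timeout would lapse. A small sketch of that predicate with std::chrono::steady_clock (KeepaliveDue is a hypothetical name, not in the patch):

#include <chrono>

using Seconds = std::chrono::seconds;
using TimePoint = std::chrono::steady_clock::time_point;

bool KeepaliveDue(TimePoint now, TimePoint last_keepalive, Seconds timeout) {
  if (timeout <= Seconds(0)) {
    return false;  // the server advertised no timeout, so no keepalive is needed
  }
  return (now - last_keepalive) > (timeout - Seconds(5));
}

A steady clock suits this kind of interval check because it never jumps backwards with NTP corrections, so the elapsed time since the last keepalive cannot be miscounted.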
View File

@ -23,12 +23,6 @@
#include <chrono>
#include <sys/time.h>
inline struct timeval tvNow() {
timeval t = {};
gettimeofday(&t, nullptr);
return t;
}
typedef std::chrono::microseconds Microseconds;
typedef std::chrono::milliseconds Milliseconds;
typedef std::chrono::seconds Seconds;

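A small, runnable usage sketch for these aliases (illustrative only): widening conversions between them are implicit and lossless, while narrowing ones require an explicit duration_cast and truncate.

#include <cassert>
#include <chrono>

typedef std::chrono::microseconds Microseconds;
typedef std::chrono::milliseconds Milliseconds;
typedef std::chrono::seconds Seconds;

int main() {
  Microseconds us = Milliseconds(10);  // implicit widening: 10 ms == 10000 us
  assert(us == Microseconds(10000));

  Seconds s = std::chrono::duration_cast<Seconds>(Milliseconds(1500));  // truncates toward zero
  assert(s == Seconds(1));
  return 0;
}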
View File

@ -21,6 +21,7 @@
#include "zm_crypt.h"
#include "zm_logger.h"
#include "zm_time.h"
#include "zm_utils.h"
#include <cstring>
@ -205,24 +206,27 @@ User *zmLoadAuthUser(const char *auth, bool use_remote_addr) {
return nullptr;
}
// getting the time is expensive, so only do it once.
time_t now = time(nullptr);
unsigned int hours = config.auth_hash_ttl;
if (!hours) {
SystemTimePoint now = std::chrono::system_clock::now();
Hours hours = Hours(config.auth_hash_ttl);
if (hours == Hours(0)) {
Warning("No value set for ZM_AUTH_HASH_TTL. Defaulting to 2.");
hours = 2;
hours = Hours(2);
} else {
Debug(1, "AUTH_HASH_TTL is %d, time is %" PRIi64, hours, static_cast<int64>(now));
Debug(1, "AUTH_HASH_TTL is %" PRIi64 " h, time is %" PRIi64 " s",
static_cast<int64>(Hours(hours).count()),
static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count()));
}
while (MYSQL_ROW dbrow = mysql_fetch_row(result)) {
const char *username = dbrow[1];
const char *password = dbrow[2];
time_t our_now = now;
SystemTimePoint our_now = now;
tm now_tm = {};
for (unsigned int i = 0; i < hours; i++, our_now -= 3600) {
localtime_r(&our_now, &now_tm);
for (Hours i = Hours(0); i < hours; i++, our_now -= Hours(1)) {
time_t our_now_t = std::chrono::system_clock::to_time_t(our_now);
localtime_r(&our_now_t, &now_tm);
std::string auth_key = stringtf("%s%s%s%s%d%d%d%d",
config.auth_hash_secret,

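A sketch of the TTL walk above, assuming a POSIX localtime_r (the HourlyCandidates helper is hypothetical): step the reference time back one hour per iteration and produce one broken-down time per candidate auth key.

#include <chrono>
#include <ctime>
#include <vector>

typedef std::chrono::hours Hours;
typedef std::chrono::system_clock::time_point SystemTimePoint;

std::vector<tm> HourlyCandidates(SystemTimePoint now, Hours ttl) {
  std::vector<tm> candidates;
  SystemTimePoint our_now = now;
  for (Hours i = Hours(0); i < ttl; ++i, our_now -= Hours(1)) {
    time_t t = std::chrono::system_clock::to_time_t(our_now);
    tm local = {};
    localtime_r(&t, &local);
    candidates.push_back(local);  // each tm feeds one auth-key attempt
  }
  return candidates;
}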
View File

@ -22,6 +22,7 @@
#include "zm_logger.h"
#include "zm_monitor.h"
#include "zm_time.h"
extern "C" {
#include <libavutil/time.h>
@ -92,7 +93,6 @@ VideoStore::VideoStore(
converted_in_samples(nullptr),
filename(filename_in),
format(format_in),
video_first_pts(0), /* starting pts of first in frame/packet */
video_first_dts(0),
audio_first_pts(0),
audio_first_dts(0),
@ -989,25 +989,24 @@ int VideoStore::writeVideoFramePacket(const std::shared_ptr<ZMPacket> &zm_packet
//zm_packet->out_frame->key_frame = zm_packet->keyframe;
frame->pkt_duration = 0;
int64_t in_pts = zm_packet->timestamp.tv_sec * (uint64_t)1000000 + zm_packet->timestamp.tv_usec;
if (!video_first_pts) {
video_first_pts = in_pts;
Debug(2, "No video_first_pts, set to (%" PRId64 ") secs(%" PRIi64 ") usecs(%" PRIi64 ")",
video_first_pts = zm_packet->timestamp.time_since_epoch().count();
Debug(2, "No video_first_pts, set to (%" PRId64 ") secs(%.2f)",
video_first_pts,
static_cast<int64>(zm_packet->timestamp.tv_sec),
static_cast<int64>(zm_packet->timestamp.tv_usec));
FPSeconds(zm_packet->timestamp.time_since_epoch()).count());
frame->pts = 0;
} else {
uint64_t useconds = in_pts - video_first_pts;
frame->pts = av_rescale_q(useconds, AV_TIME_BASE_Q, video_out_ctx->time_base);
Microseconds useconds = std::chrono::duration_cast<Microseconds>(
zm_packet->timestamp - SystemTimePoint(Microseconds(video_first_pts)));
frame->pts = av_rescale_q(useconds.count(), AV_TIME_BASE_Q, video_out_ctx->time_base);
Debug(2,
"Setting pts for frame(%d) to (%" PRId64 ") from (start %" PRIu64 " - %" PRIu64 " - secs(%" PRIi64 ") usecs(%" PRIi64 ") @ %d/%d",
"Setting pts for frame(%d) to (%" PRId64 ") from (start %" PRIu64 " - %" PRIu64 " - us(%" PRIi64 ") @ %d/%d",
frame_count,
frame->pts,
video_first_pts,
useconds,
static_cast<int64>(zm_packet->timestamp.tv_sec),
static_cast<int64>(zm_packet->timestamp.tv_usec),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(useconds).count()),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(zm_packet->timestamp.time_since_epoch()).count()),
video_out_ctx->time_base.num,
video_out_ctx->time_base.den);
}

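A sketch of the pts rescale above: an offset in microseconds since the first frame is already in 1/1000000 units, so av_rescale_q only has to move it into the encoder's time base. PtsFromOffset is an illustrative name, not part of the patch.

extern "C" {
#include <libavutil/mathematics.h>
}
#include <chrono>
#include <cstdint>

typedef std::chrono::microseconds Microseconds;

int64_t PtsFromOffset(Microseconds since_first_frame, AVRational out_time_base) {
  AVRational microseconds_tb = {1, 1000000};  // same value as AV_TIME_BASE_Q
  return av_rescale_q(since_first_frame.count(), microseconds_tb, out_time_base);
}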
View File

@ -76,7 +76,7 @@ class VideoStore {
const char *format;
// These are for in
int64_t video_first_pts;
int64_t video_first_pts; /* starting pts of first in frame/packet */
int64_t video_first_dts;
int64_t audio_first_pts;
int64_t audio_first_dts;

View File

@ -64,8 +64,6 @@ possible, this should run at more or less constant speed.
#include "zm_time.h"
#include "zm_utils.h"
#include <getopt.h>
#include <iostream>
#include <unistd.h>
void Usage() {
fprintf(stderr, "zmc -d <device_path> or -r <proto> -H <host> -P <port> -p <path> or -f <file_path> or -m <monitor_id>\n");
@ -214,7 +212,7 @@ int main(int argc, char *argv[]) {
Error("No monitors found");
exit(-1);
} else {
Debug(2, "%zu monitors loaded", monitors.size());
Debug(2, "%zu monitors loaded", monitors.size());
}
Info("Starting Capture version %s", ZM_VERSION);
@ -242,52 +240,51 @@ int main(int argc, char *argv[]) {
if (!monitor->connect()) {
Warning("Couldn't connect to monitor %d", monitor->Id());
}
time_t now = (time_t)time(nullptr);
monitor->setStartupTime(now);
monitor->setHeartbeatTime(now);
SystemTimePoint now = std::chrono::system_clock::now();
monitor->SetStartupTime(now);
monitor->SetHeartbeatTime(now);
snprintf(sql, sizeof(sql),
snprintf(sql, sizeof(sql),
"INSERT INTO Monitor_Status (MonitorId,Status,CaptureFPS,AnalysisFPS)"
" VALUES (%u, 'Running',0,0) ON DUPLICATE KEY UPDATE Status='Running',CaptureFPS=0,AnalysisFPS=0",
monitor->Id());
zmDbDo(sql);
int sleep_time = 0;
Seconds sleep_time = Seconds(0);
while (monitor->PrimeCapture() <= 0) {
if (prime_capture_log_count % 60) {
logPrintf(Logger::ERROR+monitor->Importance(),
"Failed to prime capture of initial monitor");
logPrintf(Logger::ERROR + monitor->Importance(),
"Failed to prime capture of initial monitor");
} else {
Debug(1, "Failed to prime capture of initial monitor");
}
prime_capture_log_count ++;
if (zm_terminate) break;
if (sleep_time < 60) sleep_time++;
sleep(sleep_time);
prime_capture_log_count++;
if (zm_terminate) {
break;
}
if (sleep_time < Seconds(60)) {
sleep_time++;
}
std::this_thread::sleep_for(sleep_time);
}
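The loop above retries PrimeCapture() with a capped linear backoff. A self-contained sketch of that pattern with illustrative names: wait one second longer after each failure, up to a minute between attempts.

#include <chrono>
#include <functional>
#include <thread>

typedef std::chrono::seconds Seconds;

void RetryWithBackoff(const std::function<bool()> &try_once,
                      const std::function<bool()> &should_stop) {
  Seconds sleep_time = Seconds(0);
  while (!try_once()) {
    if (should_stop()) {
      break;
    }
    if (sleep_time < Seconds(60)) {
      sleep_time++;  // durations support ++; the step is one tick (1 s here)
    }
    std::this_thread::sleep_for(sleep_time);
  }
}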
if (zm_terminate){
break;
}
if (zm_terminate) break;
snprintf(sql, sizeof(sql),
"INSERT INTO Monitor_Status (MonitorId,Status) VALUES (%u, 'Connected') ON DUPLICATE KEY UPDATE Status='Connected'",
monitor->Id());
zmDbDo(sql);
} // end foreach monitor
if (zm_terminate) break;
int *capture_delays = new int[monitors.size()];
int *alarm_capture_delays = new int[monitors.size()];
struct timeval * last_capture_times = new struct timeval[monitors.size()];
for (size_t i = 0; i < monitors.size(); i++) {
last_capture_times[i].tv_sec = last_capture_times[i].tv_usec = 0;
capture_delays[i] = monitors[i]->GetCaptureDelay();
alarm_capture_delays[i] = monitors[i]->GetAlarmCaptureDelay();
Debug(2, "capture delay(%u mSecs 1000/capture_fps) alarm delay(%u)",
capture_delays[i], alarm_capture_delays[i]);
if (zm_terminate){
break;
}
timeval now;
int sleep_time = 0;
std::vector<SystemTimePoint> last_capture_times = std::vector<SystemTimePoint>(monitors.size());
Microseconds sleep_time = Microseconds(0);
while (!zm_terminate) {
//sigprocmask(SIG_BLOCK, &block_set, 0);
@ -314,30 +311,28 @@ int main(int argc, char *argv[]) {
}
// capture_delay is the amount of time we should sleep in useconds to achieve the desired framerate.
int delay = (monitors[i]->GetState() == Monitor::ALARM) ? alarm_capture_delays[i] : capture_delays[i];
if (delay) {
gettimeofday(&now, nullptr);
if (last_capture_times[i].tv_sec) {
Microseconds delta_time = zm::chrono::duration_cast<Microseconds>(now)
- zm::chrono::duration_cast<Microseconds>(last_capture_times[i]);
Microseconds delay = (monitors[i]->GetState() == Monitor::ALARM) ? monitors[i]->GetAlarmCaptureDelay()
: monitors[i]->GetCaptureDelay();
if (delay != Seconds(0)) {
SystemTimePoint now = std::chrono::system_clock::now();
if (last_capture_times[i].time_since_epoch() != Seconds(0)) {
Microseconds delta_time = std::chrono::duration_cast<Microseconds>(now - last_capture_times[i]);
// You have to add back in the previous sleep time
sleep_time = delay - (delta_time.count() - sleep_time);
sleep_time = delay - (delta_time - sleep_time);
Debug(4,
"Sleep time is %d from now: %" PRIi64 ".%" PRIi64" last: %" PRIi64 ".% " PRIi64 " delta % " PRIi64 " delay: %d",
sleep_time,
static_cast<int64>(now.tv_sec),
static_cast<int64>(now.tv_usec),
static_cast<int64>(last_capture_times[i].tv_sec),
static_cast<int64>(last_capture_times[i].tv_usec),
"Sleep time is %" PRIi64 " from now: %.2f s last: %.2f s delta % " PRIi64 " us delay: %" PRIi64 " us",
static_cast<int64>(Microseconds(sleep_time).count()),
FPSeconds(now.time_since_epoch()).count(),
FPSeconds(last_capture_times[i].time_since_epoch()).count(),
static_cast<int64>(delta_time.count()),
delay);
static_cast<int64>(Microseconds(delay).count()));
if (sleep_time > 0) {
Debug(4, "usleeping (%d)", sleep_time);
usleep(sleep_time);
if (sleep_time > Seconds(0)) {
std::this_thread::sleep_for(sleep_time);
}
} // end if has a last_capture time
last_capture_times[i] = now;
} // end if delay
} // end foreach n_monitors
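The pacing comment above ("add back in the previous sleep time") is the core of the capture loop. A sketch of the rule with illustrative names: the next sleep equals the target inter-frame delay minus the time the last iteration spent actually capturing, which is the measured delta with the previous sleep subtracted out. For example, a 100 ms target with a 120 ms measured delta after a 90 ms sleep means 30 ms went to capture work, so the next sleep is 70 ms.

#include <chrono>

typedef std::chrono::microseconds Microseconds;

Microseconds NextSleep(Microseconds target_delay,
                       Microseconds measured_delta,
                       Microseconds previous_sleep) {
  Microseconds capture_cost = measured_delta - previous_sleep;
  return target_delay - capture_cost;  // may be <= 0, in which case the caller skips the sleep
}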
@ -348,22 +343,18 @@ int main(int argc, char *argv[]) {
}
} // end while ! zm_terminate and connected
for (size_t i = 0; i < monitors.size(); i++) {
monitors[i]->Close();
monitors[i]->disconnect();
for (std::shared_ptr<Monitor> & monitor : monitors) {
monitor->Close();
monitor->disconnect();
}
delete [] alarm_capture_delays;
delete [] capture_delays;
delete [] last_capture_times;
if (zm_reload) {
for (std::shared_ptr<Monitor> &monitor : monitors) {
monitor->Reload();
}
logTerm();
logInit(log_id_string);
zm_reload = false;
} // end if zm_reload
} // end while ! zm_terminate outer connection loop
@ -371,7 +362,7 @@ int main(int argc, char *argv[]) {
for (std::shared_ptr<Monitor> &monitor : monitors) {
static char sql[ZM_SQL_SML_BUFSIZ];
snprintf(sql, sizeof(sql),
"INSERT INTO Monitor_Status (MonitorId,Status) VALUES (%u, 'NotRunning') ON DUPLICATE KEY UPDATE Status='NotRunning'",
"INSERT INTO Monitor_Status (MonitorId,Status) VALUES (%u, 'NotRunning') ON DUPLICATE KEY UPDATE Status='NotRunning'",
monitor->Id());
zmDbDo(sql);
}
@ -382,5 +373,5 @@ int main(int argc, char *argv[]) {
dbQueue.stop();
zmDbClose();
return zm_terminate ? 0 : result;
return zm_terminate ? 0 : result;
}

View File

@ -241,7 +241,7 @@ int main(int argc, const char *argv[], char **envp) {
}
fprintf(stdout, "Server: ZoneMinder Video Server/%s\r\n", ZM_VERSION);
time_t now = time(nullptr);
time_t now = std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());
char date_string[64];
tm now_tm = {};
strftime(date_string, sizeof(date_string)-1,

View File

@ -93,7 +93,6 @@ Options for use with monitors:
#include "zm_monitor.h"
#include "zm_local_camera.h"
#include <getopt.h>
#include <unistd.h>
void Usage(int status=-1) {
fputs(
@ -498,20 +497,27 @@ int main(int argc, char *argv[]) {
}
}
if ( function & ZMU_TIME ) {
struct timeval timestamp = monitor->GetTimestamp(image_idx);
if ( verbose ) {
SystemTimePoint timestamp = monitor->GetTimestamp(image_idx);
if (verbose) {
char timestamp_str[64] = "None";
if ( timestamp.tv_sec ) {
if (timestamp.time_since_epoch() != Seconds(0)) {
tm tm_info = {};
strftime(timestamp_str, sizeof(timestamp_str), "%Y-%m-%d %H:%M:%S", localtime_r(&timestamp.tv_sec, &tm_info));
time_t timestamp_t = std::chrono::system_clock::to_time_t(timestamp);
strftime(timestamp_str, sizeof(timestamp_str), "%Y-%m-%d %H:%M:%S", localtime_r(&timestamp_t, &tm_info));
}
Seconds ts_sec = std::chrono::duration_cast<Seconds>(timestamp.time_since_epoch());
Microseconds ts_usec = std::chrono::duration_cast<Microseconds>(timestamp.time_since_epoch() - ts_sec);
if (image_idx == -1) {
printf("Time of last image capture: %s.%02d\n", timestamp_str, static_cast<int32>(ts_usec.count() / 10000));
} else {
printf("Time of image %d capture: %s.%02d\n", image_idx, timestamp_str, static_cast<int32>(ts_usec.count() / 10000));
}
if ( image_idx == -1 )
printf("Time of last image capture: %s.%02ld\n", timestamp_str, timestamp.tv_usec/10000);
else
printf("Time of image %d capture: %s.%02ld\n", image_idx, timestamp_str, timestamp.tv_usec/10000);
} else {
if ( have_output ) fputc(separator, stdout);
printf("%ld.%02ld", timestamp.tv_sec, timestamp.tv_usec/10000);
if (have_output) {
fputc(separator, stdout);
}
printf("%.2f", FPSeconds(timestamp.time_since_epoch()).count());
have_output = true;
}
}
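A sketch of the second/sub-second split used above (PrintTimestamp is a hypothetical helper): dividing the microsecond remainder by 10000 keeps the two-digit fraction that the old tv_usec/10000 code printed.

#include <chrono>
#include <cstdio>

typedef std::chrono::seconds Seconds;
typedef std::chrono::microseconds Microseconds;

void PrintTimestamp(std::chrono::system_clock::time_point tp) {
  Seconds whole = std::chrono::duration_cast<Seconds>(tp.time_since_epoch());
  Microseconds frac =
      std::chrono::duration_cast<Microseconds>(tp.time_since_epoch() - whole);
  std::printf("%lld.%02d\n",
              static_cast<long long>(whole.count()),
              static_cast<int>(frac.count() / 10000));  // hundredths of a second
}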
@ -585,13 +591,16 @@ int main(int argc, char *argv[]) {
// Ensure that we are not recording. So the forced alarm is distinct from what was recording before
monitor->ForceAlarmOff();
monitor->ForceAlarmOn(config.forced_alarm_score, "Forced Web");
int wait = 10*1000*1000; // 10 seconds
while ((monitor->GetState() != Monitor::ALARM) and !zm_terminate and wait) {
Microseconds wait_time = Seconds(10);
while ((monitor->GetState() != Monitor::ALARM) and !zm_terminate and wait_time > Seconds(0)) {
// Wait for monitor to notice.
usleep(1000);
wait -= 1000;
Microseconds sleep = Microseconds(1000);
std::this_thread::sleep_for(sleep);
wait_time -= sleep;
}
if ( monitor->GetState() != Monitor::ALARM and !wait ) {
if (monitor->GetState() != Monitor::ALARM and wait_time == Seconds(0)) {
Error("Monitor failed to respond to forced alarm.");
} else {
printf("Alarmed event id: %" PRIu64 "\n", monitor->GetLastEventId());
@ -740,13 +749,14 @@ int main(int argc, char *argv[]) {
if ( monitor_function > 1 ) {
std::shared_ptr<Monitor> monitor = Monitor::Load(monitor_id, false, Monitor::QUERY);
if ( monitor && monitor->connect() ) {
struct timeval tv = monitor->GetTimestamp();
printf( "%4d%5d%6d%9d%11ld.%02ld%6d%6d%8" PRIu64 "%8.2f\n",
SystemTimePoint timestamp = monitor->GetTimestamp();
printf( "%4d%5d%6d%9d%14.2f%6d%6d%8" PRIu64 "%8.2f\n",
monitor->Id(),
monitor_function,
monitor->GetState(),
monitor->GetTriggerState(),
tv.tv_sec, tv.tv_usec/10000,
FPSeconds(timestamp.time_since_epoch()).count(),
monitor->GetLastReadIndex(),
monitor->GetLastWriteIndex(),
monitor->GetLastEventId(),
@ -754,13 +764,12 @@ int main(int argc, char *argv[]) {
);
}
} else {
struct timeval tv = { 0, 0 };
printf("%4d%5d%6d%9d%11ld.%02ld%6d%6d%8d%8.2f\n",
mon_id,
function,
0,
0,
tv.tv_sec, tv.tv_usec/10000,
0l, 0l,
0,
0,
0,