wip

commit aee2b148f0
parent 0e799233d2
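For orientation before the hunks: this change drops the Monitor::Snapshot ring-buffer entry, whose timestamp was a pointer into a shared-memory timeval array, and reuses ZMPacket, which carries its timestamp by value. A condensed sketch of the two shapes, assuming only the declarations visible in the hunks below (members not touched here are elided):

// Condensed sketch, based on the declarations shown in this diff.
// Old per-slot entry: timestamp lived in a shared-memory timeval array, held by pointer.
struct Snapshot {
  struct timeval *timestamp;
  Image *image;
  void *padding;
};

// New per-slot entry: ZMPacket owns its timestamp by value (other members elided),
// so the separate shared-memory timestamp array is no longer mapped.
class ZMPacket {
 public:
  AVPacket packet;           // undecoded input packet
  AVFrame *frame;            // decoded frame, filled only if needed
  Image *image;              // internal image for this frame
  struct timeval timestamp;  // capture time, stored in the packet itself
};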
@@ -372,7 +372,6 @@ Monitor::Monitor(
   mem_size = sizeof(SharedData)
      + sizeof(TriggerData)
      + sizeof(VideoStoreData) //Information to pass back to the capture process
-      + (image_buffer_count*sizeof(struct timeval))
      + (image_buffer_count*camera->ImageSize())
      + 64; /* Padding used to permit aligning the images buffer to 64 byte boundary */
 
@@ -553,19 +552,17 @@ bool Monitor::connect() {
   shared_data = (SharedData *)mem_ptr;
   trigger_data = (TriggerData *)((char *)shared_data + sizeof(SharedData));
   video_store_data = (VideoStoreData *)((char *)trigger_data + sizeof(TriggerData));
-  struct timeval *shared_timestamps = (struct timeval *)((char *)video_store_data + sizeof(VideoStoreData));
-  unsigned char *shared_images = (unsigned char *)((char *)shared_timestamps + (image_buffer_count*sizeof(struct timeval)));
+  unsigned char *shared_images = (unsigned char *)((char *)video_store_data + sizeof(VideoStoreData));
 
   if ( ((unsigned long)shared_images % 64) != 0 ) {
     /* Align images buffer to nearest 64 byte boundary */
     Debug(3,"Aligning shared memory images to the next 64 byte boundary");
     shared_images = (uint8_t*)((unsigned long)shared_images + (64 - ((unsigned long)shared_images % 64)));
   }
 
   Debug(3, "Allocating %d image buffers", image_buffer_count );
-  image_buffer = new Snapshot[image_buffer_count];
+  image_buffer = new ZMPacket[image_buffer_count];
   for ( int i = 0; i < image_buffer_count; i++ ) {
-    image_buffer[i].timestamp = &(shared_timestamps[i]);
     image_buffer[i].image = new Image( width, height, camera->Colours(), camera->SubpixelOrder(), &(shared_images[i*camera->ImageSize()]) );
     image_buffer[i].image->HoldBuffer(true); /* Don't release the internal buffer or replace it with another */
   }
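The layout connect() now expects lines up with the revised mem_size computation in the constructor: with the per-image timeval array gone, image data follows VideoStoreData directly, subject to the 64-byte alignment step. A rough offset sketch, assuming the sizes used above (the variable names here are illustrative only, not from the source):

// Illustrative offset map of the shared-memory region after this change.
size_t off_trigger = sizeof(SharedData);
size_t off_video   = off_trigger + sizeof(TriggerData);
size_t off_images  = off_video + sizeof(VideoStoreData);     // timeval array no longer mapped
off_images += (64 - (off_images % 64)) % 64;                 // align images to a 64-byte boundary
size_t needed = off_images + image_buffer_count * camera->ImageSize();
// needed stays within mem_size because the constructor reserves an extra 64 bytes of padding.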
@@ -573,16 +570,14 @@ bool Monitor::connect() {
     /* Four field motion adaptive deinterlacing in use */
     /* Allocate a buffer for the next image */
     next_buffer.image = new Image( width, height, camera->Colours(), camera->SubpixelOrder());
-    next_buffer.timestamp = new struct timeval;
   }
   if ( ( purpose == ANALYSIS ) && analysis_fps ) {
     // Size of pre event buffer must be greater than pre_event_count
     // if alarm_frame_count > 1, because in this case the buffer contains
     // alarmed images that must be discarded when event is created
     pre_event_buffer_count = pre_event_count + alarm_frame_count - 1;
-    pre_event_buffer = new Snapshot[pre_event_buffer_count];
+    pre_event_buffer = new ZMPacket[pre_event_buffer_count];
     for ( int i = 0; i < pre_event_buffer_count; i++ ) {
-      pre_event_buffer[i].timestamp = new struct timeval;
       pre_event_buffer[i].image = new Image( width, height, camera->Colours(), camera->SubpixelOrder());
     }
   }
@@ -611,7 +606,6 @@ Monitor::~Monitor() {
 
   if ( (deinterlacing & 0xff) == 4) {
     delete next_buffer.image;
-    delete next_buffer.timestamp;
   }
   for ( int i = 0; i < image_buffer_count; i++ ) {
     delete image_buffer[i].image;
@@ -636,7 +630,6 @@ Monitor::~Monitor() {
   if ( analysis_fps ) {
     for ( int i = 0; i < pre_event_buffer_count; i++ ) {
       delete pre_event_buffer[i].image;
-      delete pre_event_buffer[i].timestamp;
     }
     delete[] pre_event_buffer;
   }
@@ -718,7 +711,7 @@ int Monitor::GetImage( int index, int scale ) {
   Image *image;
   // If we are going to be modifying the snapshot before writing, then we need to copy it
   if ( ( scale != ZM_SCALE_BASE ) || ( !config.timestamp_on_capture ) ) {
-    Snapshot *snap = &image_buffer[index];
+    ZMPacket *snap = &image_buffer[index];
     Image *snap_image = snap->image;
 
     alarm_image.Assign( *snap_image );
@@ -731,7 +724,7 @@ int Monitor::GetImage( int index, int scale ) {
     }
 
     if ( !config.timestamp_on_capture ) {
-      TimestampImage( &alarm_image, snap->timestamp );
+      TimestampImage( &alarm_image, &snap->timestamp );
     }
     image = &alarm_image;
   } else {
@@ -753,13 +746,13 @@ struct timeval Monitor::GetTimestamp( int index ) const {
   }
 
   if ( index != image_buffer_count ) {
-    Snapshot *snap = &image_buffer[index];
+    ZMPacket *snap = &image_buffer[index];
 
-    return( *(snap->timestamp) );
+    return snap->timestamp;
   } else {
     static struct timeval null_tv = { 0, 0 };
 
-    return( null_tv );
+    return null_tv;
   }
 }
 
@@ -778,29 +771,29 @@ unsigned int Monitor::GetLastEvent() const {
 double Monitor::GetFPS() const {
   int index1 = shared_data->last_write_index;
   if ( index1 == image_buffer_count ) {
-    return( 0.0 );
+    return 0.0;
   }
-  Snapshot *snap1 = &image_buffer[index1];
-  if ( !snap1->timestamp || !snap1->timestamp->tv_sec ) {
-    return( 0.0 );
+  ZMPacket *snap1 = &image_buffer[index1];
+  if ( !snap1->timestamp.tv_sec ) {
+    return 0.0;
   }
-  struct timeval time1 = *snap1->timestamp;
+  struct timeval time1 = snap1->timestamp;
 
   int image_count = image_buffer_count;
   int index2 = (index1+1)%image_buffer_count;
   if ( index2 == image_buffer_count ) {
-    return( 0.0 );
+    return 0.0;
   }
-  Snapshot *snap2 = &image_buffer[index2];
-  while ( !snap2->timestamp || !snap2->timestamp->tv_sec ) {
+  ZMPacket *snap2 = &image_buffer[index2];
+  while ( !snap2->timestamp.tv_sec ) {
     if ( index1 == index2 ) {
-      return( 0.0 );
+      return 0.0;
     }
     index2 = (index2+1)%image_buffer_count;
     snap2 = &image_buffer[index2];
     image_count--;
   }
-  struct timeval time2 = *snap2->timestamp;
+  struct timeval time2 = snap2->timestamp;
 
   double time_diff = tvDiffSec( time2, time1 );
 
@@ -808,9 +801,9 @@ double Monitor::GetFPS() const {
 
   if ( curr_fps < 0.0 ) {
     //Error( "Negative FPS %f, time_diff = %lf (%d:%ld.%ld - %d:%ld.%ld), ibc: %d", curr_fps, time_diff, index2, time2.tv_sec, time2.tv_usec, index1, time1.tv_sec, time1.tv_usec, image_buffer_count );
-    return( 0.0 );
+    return 0.0;
   }
-  return( curr_fps );
+  return curr_fps;
 }
 
 useconds_t Monitor::GetAnalysisRate() {
@@ -1028,7 +1021,7 @@ void Monitor::DumpZoneImage( const char *zone_string ) {
   if ( ( (!staticConfig.SERVER_ID) || ( staticConfig.SERVER_ID == server_id ) ) && mem_ptr ) {
     Debug(3, "Trying to load from local zmc");
     int index = shared_data->last_write_index;
-    Snapshot *snap = &image_buffer[index];
+    ZMPacket *snap = &image_buffer[index];
     zone_image = new Image( *snap->image );
   } else {
     Debug(3, "Trying to load from event");
@@ -1216,8 +1209,9 @@ bool Monitor::Analyse() {
     index = shared_data->last_write_index%image_buffer_count;
   }
 
-  Snapshot *snap = &image_buffer[index];
-  struct timeval *timestamp = snap->timestamp;
+  ZMPacket *snap = &image_buffer[index];
+  struct timeval *timestamp = &snap->timestamp;
+  Debug(2,timeval_to_string( *timestamp ) );
   Image *snap_image = snap->image;
 
   if ( shared_data->action ) {
@@ -1267,7 +1261,6 @@ bool Monitor::Analyse() {
   if ( static_undef ) {
     // Sure would be nice to be able to assume that these were already initialized. It's just 1 compare/branch, but really not neccessary.
     static_undef = false;
-    timestamps = new struct timeval *[pre_event_count];
     images = new Image *[pre_event_count];
     last_signal = shared_data->signal;
   }
@@ -1419,56 +1412,6 @@ bool Monitor::Analyse() {
       if ( state == IDLE ) {
         shared_data->state = state = TAPE;
       }
-
-      //if ( config.overlap_timed_events )
-      if ( false ) {
-        int pre_index;
-        int pre_event_images = pre_event_count;
-
-        if ( analysis_fps ) {
-          // If analysis fps is set,
-          // compute the index for pre event images in the dedicated buffer
-          pre_index = image_count%pre_event_buffer_count;
-
-          // Seek forward the next filled slot in to the buffer (oldest data)
-          // from the current position
-          while ( pre_event_images && !pre_event_buffer[pre_index].timestamp->tv_sec ) {
-            pre_index = (pre_index + 1)%pre_event_buffer_count;
-            // Slot is empty, removing image from counter
-            pre_event_images--;
-          }
-        } else {
-          // If analysis fps is not set (analysis performed at capturing framerate),
-          // compute the index for pre event images in the capturing buffer
-          pre_index = ((index + image_buffer_count) - pre_event_count)%image_buffer_count;
-
-          // Seek forward the next filled slot in to the buffer (oldest data)
-          // from the current position
-          while ( pre_event_images && !image_buffer[pre_index].timestamp->tv_sec ) {
-            pre_index = (pre_index + 1)%image_buffer_count;
-            // Slot is empty, removing image from counter
-            pre_event_images--;
-          }
-        }
-
-        if ( pre_event_images ) {
-          if ( analysis_fps ) {
-            for ( int i = 0; i < pre_event_images; i++ ) {
-              timestamps[i] = pre_event_buffer[pre_index].timestamp;
-              images[i] = pre_event_buffer[pre_index].image;
-              pre_index = (pre_index + 1)%pre_event_buffer_count;
-            }
-          } else {
-            for ( int i = 0; i < pre_event_images; i++ ) {
-              timestamps[i] = image_buffer[pre_index].timestamp;
-              images[i] = image_buffer[pre_index].image;
-              pre_index = (pre_index + 1)%image_buffer_count;
-            }
-          }
-
-          event->AddFrames( pre_event_images, images, timestamps );
-        }
-      } // end if false or config.overlap_timed_events
     } // end if ! event
   }
   if ( score ) {
|
||||||
|
|
||||||
// Seek forward the next filled slot in to the buffer (oldest data)
|
// Seek forward the next filled slot in to the buffer (oldest data)
|
||||||
// from the current position
|
// from the current position
|
||||||
while ( pre_event_images && !pre_event_buffer[pre_index].timestamp->tv_sec ) {
|
while ( pre_event_images && !pre_event_buffer[pre_index].timestamp.tv_sec ) {
|
||||||
pre_index = (pre_index + 1)%pre_event_buffer_count;
|
pre_index = (pre_index + 1)%pre_event_buffer_count;
|
||||||
// Slot is empty, removing image from counter
|
// Slot is empty, removing image from counter
|
||||||
pre_event_images--;
|
pre_event_images--;
|
||||||
}
|
}
|
||||||
|
|
||||||
event = new Event( this, *(pre_event_buffer[pre_index].timestamp), cause, noteSetMap );
|
event = new Event( this, pre_event_buffer[pre_index].timestamp, cause, noteSetMap );
|
||||||
} else {
|
} else {
|
||||||
// If analysis fps is not set (analysis performed at capturing framerate),
|
// If analysis fps is not set (analysis performed at capturing framerate),
|
||||||
// compute the index for pre event images in the capturing buffer
|
// compute the index for pre event images in the capturing buffer
|
||||||
|
@@ -1504,13 +1447,13 @@ bool Monitor::Analyse() {
 
           // Seek forward the next filled slot in to the buffer (oldest data)
           // from the current position
-          while ( pre_event_images && !image_buffer[pre_index].timestamp->tv_sec ) {
+          while ( pre_event_images && !image_buffer[pre_index].timestamp.tv_sec ) {
             pre_index = (pre_index + 1)%image_buffer_count;
             // Slot is empty, removing image from counter
             pre_event_images--;
           }
 
-          event = new Event( this, *(image_buffer[pre_index].timestamp), cause, noteSetMap );
+          event = new Event( this, image_buffer[pre_index].timestamp, cause, noteSetMap );
         }
         shared_data->last_event_id = event->Id();
         //set up video store data
@@ -1522,13 +1465,13 @@ bool Monitor::Analyse() {
         if ( pre_event_images ) {
           if ( analysis_fps ) {
             for ( int i = 0; i < pre_event_images; i++ ) {
-              timestamps[i] = pre_event_buffer[pre_index].timestamp;
+              timestamps[i] = &pre_event_buffer[pre_index].timestamp;
               images[i] = pre_event_buffer[pre_index].image;
               pre_index = (pre_index + 1)%pre_event_buffer_count;
             }
           } else {
             for ( int i = 0; i < pre_event_images; i++ ) {
-              timestamps[i] = image_buffer[pre_index].timestamp;
+              timestamps[i] = &image_buffer[pre_index].timestamp;
               images[i] = image_buffer[pre_index].image;
               pre_index = (pre_index + 1)%image_buffer_count;
             }
@@ -1667,7 +1610,7 @@ bool Monitor::Analyse() {
       // If analysis fps is set, add analysed image to dedicated pre event buffer
       int pre_index = image_count%pre_event_buffer_count;
       pre_event_buffer[pre_index].image->Assign(*snap->image);
-      memcpy( pre_event_buffer[pre_index].timestamp, snap->timestamp, sizeof(struct timeval) );
+      pre_event_buffer[pre_index].timestamp = snap->timestamp;
     }
 
     image_count++;
@@ -2862,8 +2805,7 @@ int Monitor::Capture() {
 
   unsigned int index = image_count % image_buffer_count;
   Image* capture_image = image_buffer[index].image;
-  ZMPacket packet;
-  packet.set_image(capture_image);
+  ZMPacket *packet = &image_buffer[index];
   int captureResult = 0;
 
   unsigned int deinterlacing_value = deinterlacing & 0xff;
@@ -2874,14 +2816,17 @@ int Monitor::Capture() {
     }
 
     /* Capture a new next image */
-    captureResult = camera->Capture(packet);
+    captureResult = camera->Capture(*packet);
+    gettimeofday( &packet->timestamp, NULL );
 
     if ( FirstCapture ) {
       FirstCapture = 0;
       return 0;
     }
   } else {
-    captureResult = camera->Capture(packet);
+    captureResult = camera->Capture(*packet);
+    gettimeofday( &packet->timestamp, NULL );
+    Debug(2,timeval_to_string( packet->timestamp ) );
     if ( captureResult < 0 ) {
       // Unable to capture image for temporary reason
       // Fake a signal loss image
@@ -2928,7 +2873,7 @@ int Monitor::Capture() {
   if ( (index == shared_data->last_read_index) && (function > MONITOR) ) {
     Warning( "Buffer overrun at index %d, image %d, slow down capture, speed up analysis or increase ring buffer size", index, image_count );
     time_t now = time(0);
-    double approxFps = double(image_buffer_count)/double(now-image_buffer[index].timestamp->tv_sec);
+    double approxFps = double(image_buffer_count)/double(now-image_buffer[index].timestamp.tv_sec);
     time_t last_read_delta = now - shared_data->last_read_time;
     if ( last_read_delta > (image_buffer_count/approxFps) ) {
       Warning( "Last image read from shared memory %ld seconds ago, zma may have gone away", last_read_delta )
@@ -2940,7 +2885,7 @@ int Monitor::Capture() {
     capture_image->MaskPrivacy( privacy_bitmask );
 
   if ( config.timestamp_on_capture ) {
-    TimestampImage( capture_image, &packet.timestamp );
+    TimestampImage( capture_image, &packet->timestamp );
   }
   int video_stream_id = camera->get_VideoStreamId();
 
@@ -2955,7 +2900,7 @@ int Monitor::Capture() {
       Debug(2, "Have videostore already?");
       // I don't know if this is important or not... but I figure we might as well write this last packet out to the store before closing it.
       // Also don't know how much it matters for audio.
-      int ret = videoStore->writePacket( &packet );
+      int ret = videoStore->writePacket( packet );
       if ( ret < 0 ) { //Less than zero and we skipped a frame
         Warning("Error writing last packet to videostore.");
       }
@@ -2998,25 +2943,25 @@ int Monitor::Capture() {
 
       // Buffer video packets, since we are not recording.
       // All audio packets are keyframes, so only if it's a video keyframe
-      if ( ( packet.packet.stream_index == video_stream_id ) && ( packet.keyframe ) ) {
+      if ( ( packet->packet.stream_index == video_stream_id ) && ( packet->keyframe ) ) {
         packetqueue.clearQueue( this->GetPreEventCount(), video_stream_id );
       }
       // The following lines should ensure that the queue always begins with a video keyframe
-      if ( packet.packet.stream_index == camera->get_AudioStreamId() ) {
+      if ( packet->packet.stream_index == camera->get_AudioStreamId() ) {
         //Debug(2, "Have audio packet, reocrd_audio is (%d) and packetqueue.size is (%d)", record_audio, packetqueue.size() );
         if ( record_audio && packetqueue.size() ) {
           // if it's audio, and we are doing audio, and there is already something in the queue
-          packetqueue.queuePacket( &packet );
+          packetqueue.queuePacket( packet );
         }
-      } else if ( packet.packet.stream_index == video_stream_id ) {
-        if ( packet.keyframe || packetqueue.size() ) // it's a keyframe or we already have something in the queue
-          packetqueue.queuePacket( &packet );
+      } else if ( packet->packet.stream_index == video_stream_id ) {
+        if ( packet->keyframe || packetqueue.size() ) // it's a keyframe or we already have something in the queue
+          packetqueue.queuePacket( packet );
       } // end if audio or video
     } // end if recording or not
 
     if ( videoStore ) {
       //Write the packet to our video store, it will be smart enough to know what to do
-      int ret = videoStore->writePacket( &packet );
+      int ret = videoStore->writePacket( packet );
       if ( ret < 0 ) { //Less than zero and we skipped a frame
         Warning("problem writing packet");
       }
@@ -3025,7 +2970,7 @@ int Monitor::Capture() {
 
   shared_data->signal = CheckSignal(capture_image);
   shared_data->last_write_index = index;
-  shared_data->last_write_time = image_buffer[index].timestamp->tv_sec;
+  shared_data->last_write_time = image_buffer[index].timestamp.tv_sec;
 
   image_count++;
 
@@ -3034,7 +2979,7 @@ int Monitor::Capture() {
   if ( !captureResult ) {
     gettimeofday( &now, NULL );
   } else {
-    now.tv_sec = image_buffer[index].timestamp->tv_sec;
+    now.tv_sec = image_buffer[index].timestamp.tv_sec;
   }
 
   // If we are too fast, we get div by zero. This seems to happen in the case of audio packets.
@@ -3355,6 +3300,6 @@ int Monitor::PostCapture() {
 }
 Monitor::Orientation Monitor::getOrientation() const { return orientation; }
 
-Monitor::Snapshot *Monitor::getSnapshot() {
+ZMPacket *Monitor::getSnapshot() {
   return &image_buffer[ shared_data->last_write_index%image_buffer_count ];
 }
@@ -153,13 +153,6 @@ protected:
     char trigger_showtext[256];
   } TriggerData;
 
-  /* sizeof(Snapshot) expected to be 16 bytes on 32bit and 32 bytes on 64bit */
-  struct Snapshot {
-    struct timeval *timestamp;
-    Image *image;
-    void* padding;
-  };
-
   //TODO: Technically we can't exclude this struct when people don't have avformat as the Memory.pm module doesn't know about avformat
   //sizeOf(VideoStoreData) expected to be 4104 bytes on 32bit and 64bit
   typedef struct {
@@ -197,7 +190,6 @@ protected:
     int last_state;
     int last_event_id;
-
 
   public:
     MonitorLink( int p_id, const char *p_name );
     ~MonitorLink();
@@ -313,9 +305,9 @@ protected:
   TriggerData *trigger_data;
   VideoStoreData *video_store_data;
 
-  Snapshot *image_buffer;
-  Snapshot next_buffer; /* Used by four field deinterlacing */
-  Snapshot *pre_event_buffer;
+  ZMPacket *image_buffer;
+  ZMPacket next_buffer; /* Used by four field deinterlacing */
+  ZMPacket *pre_event_buffer;
 
   Camera *camera;
 
@@ -444,7 +436,7 @@ public:
   unsigned int GetPreEventCount() const { return pre_event_count; };
   State GetState() const;
   int GetImage( int index=-1, int scale=100 );
-  Snapshot *getSnapshot();
+  ZMPacket *getSnapshot();
   struct timeval GetTimestamp( int index=-1 ) const;
   void UpdateAdaptiveSkip();
   useconds_t GetAnalysisRate();
@@ -665,13 +665,13 @@ Debug(2, "Have checking command Queue for connkey: %d", connkey );
       if ( (frame_mod == 1) || ((frame_count%frame_mod) == 0) ) {
         if ( !paused && !delayed ) {
           // Send the next frame
-          Monitor::Snapshot *snap = &monitor->image_buffer[index];
+          ZMPacket *snap = &monitor->image_buffer[index];
 
-          if ( !sendFrame( snap->image, snap->timestamp ) ) {
+          if ( !sendFrame( snap->image, &snap->timestamp ) ) {
             Debug(2, "sendFrame failed, quiting.");
             zm_terminate = true;
           }
-          memcpy( &last_frame_timestamp, snap->timestamp, sizeof(last_frame_timestamp) );
+          last_frame_timestamp = snap->timestamp;
           //frame_sent = true;
 
           temp_read_index = temp_write_index;
@@ -679,14 +679,14 @@ Debug(2, "Have checking command Queue for connkey: %d", connkey );
       }
       if ( buffered_playback ) {
         if ( monitor->shared_data->valid ) {
-          if ( monitor->image_buffer[index].timestamp->tv_sec ) {
+          if ( monitor->image_buffer[index].timestamp.tv_sec ) {
             int temp_index = temp_write_index%temp_image_buffer_count;
             Debug( 2, "Storing frame %d", temp_index );
             if ( !temp_image_buffer[temp_index].valid ) {
               snprintf( temp_image_buffer[temp_index].file_name, sizeof(temp_image_buffer[0].file_name), "%s/zmswap-i%05d.jpg", swap_path, temp_index );
               temp_image_buffer[temp_index].valid = true;
             }
-            memcpy( &(temp_image_buffer[temp_index].timestamp), monitor->image_buffer[index].timestamp, sizeof(temp_image_buffer[0].timestamp) );
+            temp_image_buffer[temp_index].timestamp = monitor->image_buffer[index].timestamp;
             monitor->image_buffer[index].image->WriteJpeg( temp_image_buffer[temp_index].file_name, config.jpeg_file_quality );
             temp_write_index = MOD_ADD( temp_write_index, 1, temp_image_buffer_count );
             if ( temp_write_index == temp_read_index ) {
@@ -764,7 +764,7 @@ void MonitorStream::SingleImage( int scale ) {
   int img_buffer_size = 0;
   static JOCTET img_buffer[ZM_MAX_IMAGE_SIZE];
   Image scaled_image;
-  Monitor::Snapshot *snap = monitor->getSnapshot();
+  ZMPacket *snap = monitor->getSnapshot();
   Image *snap_image = snap->image;
 
   if ( scale != ZM_SCALE_BASE ) {
@@ -773,7 +773,7 @@ void MonitorStream::SingleImage( int scale ) {
     snap_image = &scaled_image;
   }
   if ( !config.timestamp_on_capture ) {
-    monitor->TimestampImage( snap_image, snap->timestamp );
+    monitor->TimestampImage( snap_image, &snap->timestamp );
   }
   snap_image->EncodeJpeg( img_buffer, &img_buffer_size );
 
@@ -784,7 +784,7 @@ void MonitorStream::SingleImage( int scale ) {
 
 void MonitorStream::SingleImageRaw( int scale ) {
   Image scaled_image;
-  Monitor::Snapshot *snap = monitor->getSnapshot();
+  ZMPacket *snap = monitor->getSnapshot();
   Image *snap_image = snap->image;
 
   if ( scale != ZM_SCALE_BASE ) {
@@ -793,7 +793,7 @@ void MonitorStream::SingleImageRaw( int scale ) {
     snap_image = &scaled_image;
   }
   if ( !config.timestamp_on_capture ) {
-    monitor->TimestampImage( snap_image, snap->timestamp );
+    monitor->TimestampImage( snap_image, &snap->timestamp );
   }
 
   fprintf( stdout, "Content-Length: %d\r\n", snap_image->Size() );
@@ -806,7 +806,7 @@ void MonitorStream::SingleImageZip( int scale ) {
   static Bytef img_buffer[ZM_MAX_IMAGE_SIZE];
   Image scaled_image;
 
-  Monitor::Snapshot *snap = monitor->getSnapshot();
+  ZMPacket *snap = monitor->getSnapshot();
   Image *snap_image = snap->image;
 
   if ( scale != ZM_SCALE_BASE ) {
@@ -815,7 +815,7 @@ void MonitorStream::SingleImageZip( int scale ) {
     snap_image = &scaled_image;
   }
   if ( !config.timestamp_on_capture ) {
-    monitor->TimestampImage( snap_image, snap->timestamp );
+    monitor->TimestampImage( snap_image, &snap->timestamp );
   }
   snap_image->Zip( img_buffer, &img_buffer_size );
 
@@ -29,8 +29,8 @@ ZMPacket::ZMPacket( ) {
   image = NULL;
   frame = NULL;
   av_init_packet( &packet );
-  packet.size = 0;
-  gettimeofday( &timestamp, NULL );
+  packet.size = 0; // So we can detect whether it has been filled.
+  timestamp = (struct timeval){0};
 }
 
 ZMPacket::ZMPacket( Image *i ) {
@@ -38,7 +38,7 @@ ZMPacket::ZMPacket( Image *i ) {
   image = i;
   frame = NULL;
   av_init_packet( &packet );
-  gettimeofday( &timestamp, NULL );
+  timestamp = (struct timeval){0};
 }
 
 ZMPacket::ZMPacket( AVPacket *p ) {
@@ -71,6 +71,13 @@ ZMPacket::~ZMPacket() {
   //}
 }
 
+void ZMPacket::reset() {
+  zm_av_packet_unref( &packet );
+  if ( frame ) {
+    av_frame_free( &frame );
+  }
+}
+
 int ZMPacket::decode( AVCodecContext *ctx ) {
   Debug(4, "about to decode video" );
 
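ZMPacket::reset() frees the slot's libav state; since ring-buffer slots are now reused in place, a caller would presumably clear a slot before refilling it. This commit does not add such a call, so the following is only a hypothetical usage sketch built from identifiers that appear in the hunks above:

// Hypothetical usage (not wired in by this commit): recycle a ring-buffer slot.
ZMPacket *packet = &image_buffer[image_count % image_buffer_count];
packet->reset();                           // unref the old AVPacket, free any decoded AVFrame
captureResult = camera->Capture(*packet);  // refill the packet/image in place
gettimeofday( &packet->timestamp, NULL );  // stamp capture time, as Capture() now does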
@@ -36,7 +36,7 @@ class ZMPacket {
     int keyframe;
     AVPacket packet; // Input packet, undecoded
     AVFrame *frame; // Input image, decoded Theoretically only filled if needed.
-    Image *image; // Our internal image oject representing this frame
+    Image *image; // Our internal image object representing this frame
     struct timeval timestamp;
   public:
     AVPacket *av_packet() { return &packet; }
@@ -47,6 +47,7 @@ class ZMPacket {
 
     int is_keyframe() { return keyframe; };
     int decode( AVCodecContext *ctx );
+    void reset();
     ZMPacket( AVPacket *packet, struct timeval *timestamp );
     ZMPacket( AVPacket *packet );
     ZMPacket( AVPacket *packet, AVFrame *frame, Image *image );