Merge branch 'master' into multistream

Isaac Connor 2022-01-10 17:51:50 -05:00
commit fd3a70172d
43 changed files with 869 additions and 255 deletions

View File

@ -518,6 +518,15 @@ endif()
#list(APPEND ZM_BIN_LIBS "${Boost_LIBRARIES}")
#endif()
find_package(GSOAP 2.0.0)
if (GSOAP_FOUND)
set(optlibsfound "${optlibsfound} gsoap")
add_compile_definitions(WITH_GSOAP)
else()
set(optlibsnotfound "${optlibsnotfound} gsoap")
endif()
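
Once WITH_GSOAP is defined here, C++ sources can gate ONVIF-specific code at compile time. A minimal sketch of the guard pattern (the same #ifdef WITH_GSOAP blocks appear in zm_monitor.cpp/h later in this commit):

#ifdef WITH_GSOAP
#include "soapPullPointSubscriptionBindingProxy.h"  // gsoap-generated proxy
#endif

// Returns whether this build can listen for ONVIF events.
bool onvif_supported() {
#ifdef WITH_GSOAP
  return true;   // built with gsoap support
#else
  return false;  // feature compiled out
#endif
}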
if(NOT ZM_NO_RTSPSERVER)
set(HAVE_RTSP_SERVER 1)
else()

View File

@ -0,0 +1,113 @@
#
# This module detects if gsoap is installed and determines where the
# include files and libraries are.
#
# This code sets the following variables:
#
# GSOAP_IMPORT_DIR = full path to the gsoap import directory
# GSOAP_LIBRARIES = full path to the gsoap libraries
# GSOAP_SSL_LIBRARIES = full path to the gsoap ssl libraries
# GSOAP_INCLUDE_DIR = include dir to be used when using the gsoap library
# GSOAP_PLUGIN_DIR = gsoap plugins directory
# GSOAP_WSDL2H = wsdl2h binary
# GSOAP_SOAPCPP2 = soapcpp2 binary
# GSOAP_FOUND = set to true if gsoap was found successfully
#
# GSOAP_ROOT
# setting this enables search for gsoap libraries / headers in this location
# -----------------------------------------------------
# GSOAP Import Directories
# -----------------------------------------------------
find_path(GSOAP_IMPORT_DIR
NAMES wsa.h
PATHS ${GSOAP_ROOT}/import ${GSOAP_ROOT}/share/gsoap/import
)
# -----------------------------------------------------
# GSOAP Libraries
# -----------------------------------------------------
find_library(GSOAP_CXX_LIBRARIES
NAMES gsoap++
HINTS ${GSOAP_ROOT}/lib ${GSOAP_ROOT}/lib64
${GSOAP_ROOT}/lib32
DOC "The main gsoap library"
)
find_library(GSOAP_SSL_CXX_LIBRARIES
NAMES gsoapssl++
HINTS ${GSOAP_ROOT}/lib ${GSOAP_ROOT}/lib64
${GSOAP_ROOT}/lib32
DOC "The ssl gsoap library"
)
# -----------------------------------------------------
# GSOAP Include Directories
# -----------------------------------------------------
find_path(GSOAP_INCLUDE_DIR
NAMES stdsoap2.h
HINTS ${GSOAP_ROOT} ${GSOAP_ROOT}/include ${GSOAP_ROOT}/include/*
DOC "The gsoap include directory"
)
# -----------------------------------------------------
# GSOAP plugin Directories
# -----------------------------------------------------
find_path(GSOAP_PLUGIN_DIR
NAMES wsseapi.c
HINTS ${GSOAP_ROOT} /usr/share/gsoap/plugin
DOC "The gsoap plugin directory"
)
# -----------------------------------------------------
# GSOAP Binaries
# ----------------------------------------------------
if(NOT GSOAP_TOOL_DIR)
set(GSOAP_TOOL_DIR ${GSOAP_ROOT})
endif()
find_program(GSOAP_WSDL2H
NAMES wsdl2h
HINTS ${GSOAP_TOOL_DIR}/bin
DOC "The gsoap bin directory"
)
find_program(GSOAP_SOAPCPP2
NAMES soapcpp2
HINTS ${GSOAP_TOOL_DIR}/bin
DOC "The gsoap bin directory"
)
# -----------------------------------------------------
# GSOAP version
# try to determine the flag for 2.7.6 compatibility; it broke with 2.7.13 and broke again with 2.7.16
# ----------------------------------------------------
if(GSOAP_SOAPCPP2)
execute_process(COMMAND ${GSOAP_SOAPCPP2} "-V" OUTPUT_VARIABLE GSOAP_STRING_VERSION ERROR_VARIABLE GSOAP_STRING_VERSION )
string(REGEX MATCH "[0-9]*\\.[0-9]*\\.[0-9]*" GSOAP_VERSION ${GSOAP_STRING_VERSION})
endif()
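
For illustration only, the same version extraction expressed in C++ (the module does it above in CMake with string(REGEX MATCH ...) over the `soapcpp2 -V` banner):

#include <regex>
#include <string>

// Pulls the first "major.minor.patch" token out of a version banner.
std::string parse_gsoap_version(const std::string &banner) {
  static const std::regex version_re(R"(\d+\.\d+\.\d+)");
  std::smatch m;
  return std::regex_search(banner, m, version_re) ? m.str(0) : std::string();
}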
# -----------------------------------------------------
# GSOAP_276_COMPAT_FLAGS and GSOAP_VERSION
# try to determine the flag for 2.7.6 compatibility; it broke with 2.7.13 and broke again with 2.7.16
# ----------------------------------------------------
if( "${GSOAP_VERSION}" VERSION_LESS "2.7.6")
set(GSOAP_276_COMPAT_FLAGS "")
elseif ( "${GSOAP_VERSION}" VERSION_LESS "2.7.14")
set(GSOAP_276_COMPAT_FLAGS "-z")
else ( "${GSOAP_VERSION}" VERSION_LESS "2.7.14")
set(GSOAP_276_COMPAT_FLAGS "-z1 -z2")
endif ( "${GSOAP_VERSION}" VERSION_LESS "2.7.6")
# -----------------------------------------------------
# handle the QUIETLY and REQUIRED arguments and set GSOAP_FOUND to TRUE if
# all listed variables are TRUE
# -----------------------------------------------------
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GSOAP DEFAULT_MSG GSOAP_CXX_LIBRARIES
GSOAP_INCLUDE_DIR GSOAP_WSDL2H GSOAP_SOAPCPP2)
mark_as_advanced(GSOAP_INCLUDE_DIR GSOAP_LIBRARIES GSOAP_WSDL2H GSOAP_SOAPCPP2)
if(GSOAP_FOUND)
if(GSOAP_FIND_REQUIRED AND GSOAP_FIND_VERSION AND ${GSOAP_VERSION} VERSION_LESS ${GSOAP_FIND_VERSION})
message(SEND_ERROR "Found GSOAP version ${GSOAP_VERSION}, less than the required ${GSOAP_FIND_VERSION}.")
endif()
endif()

View File

@ -467,6 +467,7 @@ CREATE TABLE `Monitors` (
`ONVIF_Username` VARCHAR(64) NOT NULL DEFAULT '',
`ONVIF_Password` VARCHAR(64) NOT NULL DEFAULT '',
`ONVIF_Options` VARCHAR(64) NOT NULL DEFAULT '',
`ONVIF_Event_Listener` BOOLEAN NOT NULL DEFAULT FALSE,
`Device` tinytext NOT NULL default '',
`Channel` tinyint(3) unsigned NOT NULL default '0',
`Format` int(10) unsigned NOT NULL default '0',

View File

@ -1,2 +1,21 @@
/* Change Cause from varchar(32) to TEXT. We now include the alarmed zone name */
ALTER TABLE `Events` MODIFY `Cause` TEXT;
--
-- Update Monitors table to have an ONVIF_Event_Listener column
--
SELECT 'Checking for ONVIF_Event_Listener in Monitors';
SET @s = (SELECT IF(
(SELECT COUNT(*)
FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'Monitors'
AND table_schema = DATABASE()
AND column_name = 'ONVIF_Event_Listener'
) > 0,
"SELECT 'Column ONVIF_Event_Listener already exists in Monitors'",
"ALTER TABLE `Monitors` ADD COLUMN `ONVIF_Event_Listener` BOOLEAN NOT NULL default false AFTER `ONVIF_Options`"
));
PREPARE stmt FROM @s;
EXECUTE stmt;

View File

@ -36,7 +36,7 @@
%global _hardened_build 1
Name: zoneminder
Version: 1.37.6
Version: 1.37.7
Release: 1%{?dist}
Summary: A camera monitoring and analysis tool
Group: System Environment/Daemons

View File

@ -30,6 +30,7 @@ Build-Depends: debhelper (>= 11), sphinx-doc, python3-sphinx, dh-linktree, dh-ap
,libdata-entropy-perl
,libvncserver-dev
,libjwt-gnutls-dev|libjwt-dev
,libgsoap-dev
Standards-Version: 4.5.0
Homepage: https://www.zoneminder.com/
@ -73,6 +74,7 @@ Depends: ${shlibs:Depends}, ${misc:Depends}, ${perl:Depends}
,libdata-entropy-perl
,libvncclient1|libvncclient0
,libjwt-gnutls0|libjwt0
,libgsoap-2.8.104|libgsoap-2.8.91|libgsoap-2.8.75|libgsoap-2.8.60|libgsoap10
Recommends: ${misc:Recommends}
,libapache2-mod-php | php-fpm
,default-mysql-server | mariadb-server | virtual-mysql-server

View File

@ -56,6 +56,13 @@ $serial = $primary_key = 'Id';
Enabled
LinkedMonitors
Triggers
EventStartCommand
EventEndCommand
ONVIF_URL
ONVIF_Username
ONVIF_Password
ONVIF_Options
ONVIF_Event_Listener
Device
Channel
Format

View File

@ -6,6 +6,7 @@ configure_file(zm_config_data.h.in "${CMAKE_BINARY_DIR}/zm_config_data.h" @ONLY)
# Group together all the source files that are used by all the binaries (zmc, zmu, zms etc)
set(ZM_BIN_SRC_FILES
zm_analysis_thread.cpp
zm_poll_thread.cpp
zm_buffer.cpp
zm_camera.cpp
zm_comms.cpp
@ -67,6 +68,57 @@ set(ZM_BIN_SRC_FILES
zm_zone.cpp
zm_storage.cpp)
if(GSOAP_FOUND)
set(ZM_BIN_SRC_FILES
${ZM_BIN_SRC_FILES}
${CMAKE_BINARY_DIR}/generated/soapPullPointSubscriptionBindingProxy.cpp
${CMAKE_BINARY_DIR}/generated/soapC.cpp
${GSOAP_PLUGIN_DIR}/smdevp.c
${GSOAP_PLUGIN_DIR}/mecevp.c
${GSOAP_PLUGIN_DIR}/wsaapi.c
${GSOAP_PLUGIN_DIR}/wsseapi.c
${GSOAP_PLUGIN_DIR}/../custom/struct_timeval.c
)
SET(GCC_COMPILE_FLAGS "-DWITH_OPENSSL -DWITH_DOM")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${GCC_COMPILE_FLAGS}")
#Create the directory that will host files generated by GSOAP
file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/generated)
#some files are generated by gsoap
set_source_files_properties( ${CMAKE_BINARY_DIR}/generated/soapClientLib.c PROPERTIES GENERATED TRUE )
set_source_files_properties( ${CMAKE_BINARY_DIR}/generated/soapC.c PROPERTIES GENERATED TRUE )
set_source_files_properties( ${CMAKE_BINARY_DIR}/generated/soapPullPointSubscriptionBindingProxy.cpp PROPERTIES GENERATED TRUE )
set_source_files_properties( ${GSOAP_PLUGIN_DIR}/smdevp.c PROPERTIES LANGUAGE CXX)
set_source_files_properties( ${GSOAP_PLUGIN_DIR}/mecevp.c PROPERTIES LANGUAGE CXX)
set_source_files_properties( ${GSOAP_PLUGIN_DIR}/wsaapi.c PROPERTIES LANGUAGE CXX)
set_source_files_properties( ${GSOAP_PLUGIN_DIR}/wsseapi.c PROPERTIES LANGUAGE CXX)
set_source_files_properties( ${GSOAP_PLUGIN_DIR}/../custom/struct_timeval.c PROPERTIES LANGUAGE CXX)
#Create a cmake target that generates the gsoap files
add_custom_command(
OUTPUT ${CMAKE_BINARY_DIR}/generated/soapC.cpp
OUTPUT ${CMAKE_BINARY_DIR}/generated/soapPullPointSubscriptionBindingProxy.cpp
COMMAND ${GSOAP_WSDL2H} -d -P -O2 -o ${CMAKE_BINARY_DIR}/generated/bindings.h http://www.onvif.org/onvif/ver10/events/wsdl/event.wsdl
COMMAND echo '\#import \"wsse.h\"' >> ${CMAKE_BINARY_DIR}/generated/bindings.h
COMMAND echo '\#import \"struct_timeval.h\"' >> ${CMAKE_BINARY_DIR}/generated/bindings.h
COMMAND ${GSOAP_SOAPCPP2} -n -2 -C -I ${GSOAP_PLUGIN_DIR}/.. -I ${GSOAP_PLUGIN_DIR}/../import/ -I ${GSOAP_PLUGIN_DIR}/../custom/ -d ${CMAKE_BINARY_DIR}/generated -j -x ${CMAKE_BINARY_DIR}/generated/bindings.h
COMMENT "CREATING STUBS AND GLUE CODE"
)
add_custom_target(GSOAP_GENERATION_TARGET
DEPENDS ${CMAKE_BINARY_DIR}/generated/soapC.cpp
DEPENDS ${CMAKE_BINARY_DIR}/generated/soapPullPointSubscriptionBindingProxy.cpp
DEPENDS ${GSOAP_PLUGIN_DIR}/smdevp.c
DEPENDS ${GSOAP_PLUGIN_DIR}/mecevp.c
DEPENDS ${GSOAP_PLUGIN_DIR}/wsaapi.c
DEPENDS ${GSOAP_PLUGIN_DIR}/wsseapi.c
DEPENDS ${GSOAP_PLUGIN_DIR}/../custom/struct_timeval.c
)
endif()
# A fix for cmake recompiling the source files for every target.
add_library(zm STATIC ${ZM_BIN_SRC_FILES})
@ -75,6 +127,15 @@ target_include_directories(zm
${CMAKE_BINARY_DIR}
${CMAKE_CURRENT_SOURCE_DIR})
if(GSOAP_FOUND)
target_include_directories(zm
PUBLIC
${CMAKE_BINARY_DIR}/generated
${GSOAP_PLUGIN_DIR}/..
${GSOAP_INCLUDE_DIR})
endif()
target_link_libraries(zm
PUBLIC
FFMPEG::avcodec
@ -89,6 +150,15 @@ target_link_libraries(zm
PRIVATE
zm-core-interface)
if(GSOAP_FOUND)
target_link_libraries(zm
PUBLIC
${GSOAP_CXX_LIBRARIES}
${GSOAP_SSL_CXX_LIBRARIES}
${OPENSSL_SSL_LIBRARY}
${OPENSSL_CRYPTO_LIBRARY})
endif()
if(${ZM_JWT_BACKEND} STREQUAL "jwt_cpp")
target_link_libraries(zm
PUBLIC
@ -110,6 +180,11 @@ add_executable(zms zms.cpp)
add_executable(zmu zmu.cpp)
add_executable(zmbenchmark zmbenchmark.cpp)
if(GSOAP_FOUND)
#Make sure that the client is compiled only after gsoap has been processed
add_dependencies(zmc GSOAP_GENERATION_TARGET)
endif()
target_link_libraries(zmc
PRIVATE
zm-core-interface

View File

@ -702,9 +702,7 @@ bool EventStream::sendFrame(Microseconds delta_us) {
// This needs to be abstracted. If we are saving jpgs, then load the capture file.
// If we are only saving analysis frames, then send that.
if (event_data->SaveJPEGs & 1) {
filepath = stringtf(staticConfig.capture_file_format.c_str(), event_data->path.c_str(), curr_frame_id);
} else if (event_data->SaveJPEGs & 2) {
if ((frame_type == FRAME_ANALYSIS) && (event_data->SaveJPEGs & 2)) {
filepath = stringtf(staticConfig.analyse_file_format.c_str(), event_data->path.c_str(), curr_frame_id);
if (stat(filepath.c_str(), &filestat) < 0) {
Debug(1, "analyze file %s not found will try to stream from other", filepath.c_str());
@ -714,7 +712,9 @@ bool EventStream::sendFrame(Microseconds delta_us) {
filepath = "";
}
}
} else if ( !ffmpeg_input ) {
} else if (event_data->SaveJPEGs & 1) {
filepath = stringtf(staticConfig.capture_file_format.c_str(), event_data->path.c_str(), curr_frame_id);
} else if (!ffmpeg_input) {
Fatal("JPEGS not saved. zms is not capable of streaming jpegs from mp4 yet");
return false;
}
@ -991,45 +991,51 @@ void EventStream::runStream() {
// Have to reset start to now when replaying
start = now;
}
frame_data = &event_data->frames[curr_frame_id-1];
// frame_data->delta is the time since last frame as a float in seconds
// but what if we are skipping frames? We need the distance from the last frame sent
// Also, what about reverse? It needs to be an absolute value
if ((unsigned int)curr_frame_id <= event_data->frame_count) {
frame_data = &event_data->frames[curr_frame_id-1];
// There are two ways to go about this, not sure which is correct.
// you can calculate the relationship between now and the start
// or calc the relationship from the last frame. I think from the start is better as it self-corrects
//
if (last_frame_offset != Seconds(0)) {
// We assume that we are going forward and the next frame is in the future.
delta = std::chrono::duration_cast<Microseconds>(frame_data->offset - (now - start));
// frame_data->delta is the time since last frame as a float in seconds
// but what if we are skipping frames? We need the distance from the last frame sent
// Also, what about reverse? It needs to be an absolute value
Debug(2, "New delta: now - start = %" PRIu64 " us offset %" PRIi64 " us- elapsed = %" PRIu64 " us",
static_cast<int64>(std::chrono::duration_cast<Microseconds>(now - start).count()),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(frame_data->offset).count()),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(delta).count()));
} else {
Debug(2, "No last frame_offset, no sleep");
delta = Seconds(0);
}
last_frame_offset = frame_data->offset;
// There are two ways to go about this, not sure which is correct.
// you can calculate the relationship between now and the start
// or calc the relationship from the last frame. I think from the start is better as it self-corrects
//
if (last_frame_offset != Seconds(0)) {
// We assume that we are going forward and the next frame is in the future.
delta = std::chrono::duration_cast<Microseconds>(frame_data->offset - (now - start));
if (send_frame && type != STREAM_MPEG) {
if (delta != Seconds(0)) {
if (delta > MAX_SLEEP) {
Debug(1, "Limiting sleep to %" PRIi64 " ms because calculated sleep is too long: %" PRIi64" us",
Debug(2, "New delta: now - start = %" PRIu64 " us offset %" PRIi64 " us- elapsed = %" PRIu64 " us",
static_cast<int64>(std::chrono::duration_cast<Microseconds>(now - start).count()),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(frame_data->offset).count()),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(delta).count()));
} else {
Debug(2, "No last frame_offset, no sleep");
delta = Seconds(0);
}
last_frame_offset = frame_data->offset;
if (send_frame && type != STREAM_MPEG) {
if (delta != Seconds(0)) {
if (delta > MAX_SLEEP) {
Debug(1, "Limiting sleep to %" PRIi64 " ms because calculated sleep is too long: %" PRIi64" us",
static_cast<int64>(std::chrono::duration_cast<Milliseconds>(MAX_SLEEP).count()),
static_cast<int64>(std::chrono::duration_cast<Microseconds>(delta).count()));
delta = MAX_SLEEP;
}
delta = MAX_SLEEP;
}
std::this_thread::sleep_for(delta);
Debug(3, "Done sleeping: %" PRIi64 " us",
std::this_thread::sleep_for(delta);
Debug(3, "Done sleeping: %" PRIi64 " us",
static_cast<int64>(std::chrono::duration_cast<Microseconds>(delta).count()));
}
}
}
} // end if need to sleep
} else {
Debug(1, "invalid curr_frame_id %ld !< %lu", curr_frame_id, event_data->frame_count);
} // end if not at end of event
} else {
// Paused
delta = std::chrono::duration_cast<Microseconds>(FPSeconds(
ZM_RATE_BASE / ((base_fps ? base_fps : 1) * (replay_rate ? abs(replay_rate * 2) : 2))));
@ -1088,11 +1094,6 @@ void EventStream::runStream() {
} // end void EventStream::runStream()
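
The sleep computation above reduces to: delta = (frame offset within the event) - (time elapsed since the stream started), capped at MAX_SLEEP. A self-contained worked example with hypothetical values:

#include <chrono>

int main() {
  using Microseconds = std::chrono::microseconds;
  const Microseconds MAX_SLEEP{500000};      // assumed cap of 0.5 s
  Microseconds elapsed{10000000};            // stream started 10.0 s ago
  Microseconds offset{10400000};             // frame sits 10.4 s into the event
  Microseconds delta = offset - elapsed;     // 0.4 s to sleep before sending
  if (delta > MAX_SLEEP) delta = MAX_SLEEP;  // limit overly long sleeps
  return delta.count() == 400000 ? 0 : 1;
}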
bool EventStream::send_file(const std::string &filepath) {
static unsigned char temp_img_buffer[ZM_MAX_IMAGE_SIZE];
int img_buffer_size = 0;
uint8_t *img_buffer = temp_img_buffer;
FILE *fdj = nullptr;
fdj = fopen(filepath.c_str(), "rb");
if ( !fdj ) {
@ -1122,10 +1123,16 @@ bool EventStream::send_file(const std::string &filepath) {
// Success
fclose(fdj); /* Close the file handle */
return true;
} else {
Debug(1, "Failed to sendfile?");
}
Warning("Unable to send raw frame %ld: %s rc %d", curr_frame_id, strerror(errno), rc);
#endif
img_buffer_size = fread(img_buffer, 1, sizeof(temp_img_buffer), fdj);
static unsigned char temp_img_buffer[ZM_MAX_IMAGE_SIZE];
uint8_t *img_buffer = temp_img_buffer;
int img_buffer_size = fread(img_buffer, 1, sizeof(temp_img_buffer), fdj);
fclose(fdj); /* Close the file handle */
if ( !img_buffer_size ) {
Info("Unable to read raw frame %ld: %s", curr_frame_id, strerror(errno));

View File

@ -293,18 +293,16 @@ int FfmpegCamera::OpenFfmpeg() {
mFormatContext->interrupt_callback.opaque = this;
ret = avformat_open_input(&mFormatContext, mPath.c_str(), nullptr, &opts);
if ( ret != 0 )
{
if (ret != 0) {
logPrintf(Logger::ERROR + monitor->Importance(),
"Unable to open input %s due to: %s", mPath.c_str(),
av_make_error_string(ret).c_str());
if ( mFormatContext ) {
if (mFormatContext) {
avformat_close_input(&mFormatContext);
mFormatContext = nullptr;
}
av_dict_free(&opts);
return -1;
}
AVDictionaryEntry *e = nullptr;

View File

@ -31,8 +31,7 @@ int FFmpeg_Input::Open(
const AVStream * audio_in_stream,
const AVCodecContext * audio_in_ctx
) {
video_stream_id = video_in_stream->index;
int max_stream_index = video_in_stream->index;
int max_stream_index = video_stream_id = video_in_stream->index;
if ( audio_in_stream ) {
max_stream_index = video_in_stream->index > audio_in_stream->index ? video_in_stream->index : audio_in_stream->index;

View File

@ -28,7 +28,6 @@ class FifoStream : public StreamBase {
std::string stream_path;
int total_read;
int bytes_read;
unsigned int frame_count;
protected:
typedef enum { UNKNOWN, MJPEG, RAW } StreamType;
@ -39,9 +38,9 @@ class FifoStream : public StreamBase {
public:
FifoStream() :
StreamBase(),
total_read(0),
bytes_read(0),
frame_count(0),
stream_type(UNKNOWN)
{}

View File

@ -46,9 +46,9 @@ FileCamera::FileCamera(
p_hue,
p_colour,
p_capture,
p_record_audio)
p_record_audio),
path(p_path)
{
path = std::string(p_path);
if (capture) {
Initialise();
}

View File

@ -214,25 +214,26 @@ Image::Image(int p_width, int p_linesize, int p_height, int p_colours, int p_sub
update_function_pointers();
}
Image::Image(const AVFrame *frame) {
Image::Image(const AVFrame *frame) :
colours(ZM_COLOUR_RGB32),
padding(0),
subpixelorder(ZM_SUBPIX_ORDER_RGBA),
imagePixFormat(AV_PIX_FMT_RGBA),
buffer(0),
holdbuffer(0)
{
width = frame->width;
height = frame->height;
pixels = width*height;
zm_dump_video_frame(frame, "Image.Assign(frame)");
// FIXME
colours = ZM_COLOUR_RGB32;
subpixelorder = ZM_SUBPIX_ORDER_RGBA;
imagePixFormat = AV_PIX_FMT_RGBA;
//(AVPixelFormat)frame->format;
//(AVPixelFormat)frame->format;
size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 32);
// av_image_get_linesize isn't aligned, so we have to do that.
linesize = FFALIGN(av_image_get_linesize(AV_PIX_FMT_RGBA, width, 0), 32);
padding = 0;
buffer = nullptr;
holdbuffer = 0;
AllocImgBuffer(size);
this->Assign(frame);
}
@ -1677,15 +1678,15 @@ void Image::Overlay( const Image &image ) {
}
/* RGB32 compatible: complete */
void Image::Overlay( const Image &image, unsigned int x, unsigned int y ) {
void Image::Overlay( const Image &image, const unsigned int lo_x, const unsigned int lo_y ) {
if ( !(width < image.width || height < image.height) ) {
Panic("Attempt to overlay image too big for destination, %dx%d > %dx%d",
image.width, image.height, width, height );
}
if ( !(width < (x+image.width) || height < (y+image.height)) ) {
if ( !(width < (lo_x+image.width) || height < (lo_y+image.height)) ) {
Panic("Attempt to overlay image outside of destination bounds, %dx%d @ %dx%d > %dx%d",
image.width, image.height, x, y, width, height );
image.width, image.height, lo_x, lo_y, width, height );
}
if ( !(colours == image.colours) ) {
@ -1693,10 +1694,8 @@ void Image::Overlay( const Image &image, unsigned int x, unsigned int y ) {
colours, image.colours);
}
unsigned int lo_x = x;
unsigned int lo_y = y;
unsigned int hi_x = (x+image.width)-1;
unsigned int hi_y = (y+image.height-1);
unsigned int hi_x = (lo_x+image.width)-1;
unsigned int hi_y = (lo_y+image.height-1);
if ( colours == ZM_COLOUR_GRAY8 ) {
const uint8_t *psrc = image.buffer;
for ( unsigned int y = lo_y; y <= hi_y; y++ ) {
@ -2732,7 +2731,7 @@ void Image::Flip( bool leftright ) {
AssignDirect(width, height, colours, subpixelorder, flip_buffer, size, ZM_BUFTYPE_ZM);
}
void Image::Scale(unsigned int factor) {
void Image::Scale(const unsigned int factor) {
if ( !factor ) {
Error("Bogus scale factor %d found", factor);
return;
@ -2756,15 +2755,13 @@ void Image::Scale(unsigned int factor) {
unsigned int h_count = ZM_SCALE_BASE/2;
unsigned int last_h_index = 0;
unsigned int last_w_index = 0;
unsigned int h_index;
for ( unsigned int y = 0; y < height; y++ ) {
unsigned char *ps = &buffer[y*wc];
unsigned int w_count = ZM_SCALE_BASE/2;
unsigned int w_index;
last_w_index = 0;
for ( unsigned int x = 0; x < width; x++ ) {
w_count += factor;
w_index = w_count/ZM_SCALE_BASE;
unsigned int w_index = w_count/ZM_SCALE_BASE;
for (unsigned int f = last_w_index; f < w_index; f++ ) {
for ( unsigned int c = 0; c < colours; c++ ) {
*pd++ = *(ps+c);
@ -2774,7 +2771,7 @@ void Image::Scale(unsigned int factor) {
last_w_index = w_index;
}
h_count += factor;
h_index = h_count/ZM_SCALE_BASE;
unsigned int h_index = h_count/ZM_SCALE_BASE;
for ( unsigned int f = last_h_index+1; f < h_index; f++ ) {
memcpy(pd, pd-nwc, nwc);
pd += nwc;
@ -2786,17 +2783,14 @@ void Image::Scale(unsigned int factor) {
} else {
unsigned char *pd = scale_buffer;
unsigned int wc = width*colours;
unsigned int xstart = factor/2;
unsigned int ystart = factor/2;
unsigned int h_count = ystart;
unsigned int h_count = factor/2;
unsigned int last_h_index = 0;
unsigned int last_w_index = 0;
unsigned int h_index;
for ( unsigned int y = 0; y < height; y++ ) {
h_count += factor;
h_index = h_count/ZM_SCALE_BASE;
unsigned int h_index = h_count/ZM_SCALE_BASE;
if ( h_index > last_h_index ) {
unsigned int w_count = xstart;
unsigned int w_count = factor/2;
unsigned int w_index;
last_w_index = 0;
@ -2825,6 +2819,7 @@ void Image::Scale(unsigned int factor) {
void Image::Deinterlace_Discard() {
/* Simple deinterlacing. Copy the even lines into the odd lines */
// ICON: These can be drastically improved. But who cares?
if ( colours == ZM_COLOUR_GRAY8 ) {
const uint8_t *psrc;
@ -3107,9 +3102,9 @@ __attribute__((noinline,__target__("sse2")))
#endif
void sse2_fastblend(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count, double blendpercent) {
#if ((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE))
static uint32_t divider = 0;
static uint32_t clearmask = 0;
static double current_blendpercent = 0.0;
static uint32_t clearmask = 0;
static uint32_t divider = 0;
if ( current_blendpercent != blendpercent ) {
/* Attempt to match the blending percent to one of the possible values */
@ -3310,10 +3305,10 @@ void neon32_armv7_fastblend(const uint8_t* col1, const uint8_t* col2, uint8_t* r
__attribute__((noinline)) void neon64_armv8_fastblend(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count, double blendpercent) {
#if (defined(__aarch64__) && !defined(ZM_STRIP_NEON))
static int8_t divider = 0;
static double current_blendpercent = 0.0;
if(current_blendpercent != blendpercent) {
if (current_blendpercent != blendpercent) {
static int8_t divider = 0;
/* Attempt to match the blending percent to one of the possible values */
if(blendpercent < 2.34375) {
// 1.5625% blending

View File

@ -66,6 +66,14 @@
#define MAP_LOCKED 0
#endif
#ifdef WITH_GSOAP
//Workaround for the gsoap library on RHEL
struct Namespace namespaces[] =
{
{NULL, NULL} // end of table
};
#endif
// This is the official SQL (and ordering of the fields) to load a Monitor.
// It will be used wherever a Monitor dbrow is needed. WHERE conditions can be appended
std::string load_monitor_sql =
@ -83,7 +91,7 @@ std::string load_monitor_sql =
"`SectionLength`, `MinSectionLength`, `FrameSkip`, `MotionFrameSkip`, "
"`FPSReportInterval`, `RefBlendPerc`, `AlarmRefBlendPerc`, `TrackMotion`, `Exif`,"
"`RTSPServer`, `RTSPStreamName`,"
"`ONVIF_URL`, `ONVIF_Username`, `ONVIF_Password`, `ONVIF_Options`,"
"`ONVIF_URL`, `ONVIF_Username`, `ONVIF_Password`, `ONVIF_Options`, `ONVIF_Event_Listener`, "
"`SignalCheckPoints`, `SignalCheckColour`, `Importance`-1 FROM `Monitors`";
std::string CameraType_Strings[] = {
@ -235,7 +243,7 @@ bool Monitor::MonitorLink::connect() {
return true;
}
return false;
} // end bool Monitor::MonitorLink::connect()
} // end bool Monitor::MonitorLink::connect()
bool Monitor::MonitorLink::disconnect() {
if (connected) {
@ -421,6 +429,10 @@ Monitor::Monitor()
privacy_bitmask(nullptr),
n_linked_monitors(0),
linked_monitors(nullptr),
#ifdef WITH_GSOAP
soap(nullptr),
ONVIF_Closes_Event(FALSE),
#endif
red_val(0),
green_val(0),
blue_val(0),
@ -457,7 +469,7 @@ Monitor::Monitor()
"SectionLength, MinSectionLength, FrameSkip, MotionFrameSkip, "
"FPSReportInterval, RefBlendPerc, AlarmRefBlendPerc, TrackMotion, Exif,"
"`RTSPServer`,`RTSPStreamName`,
"`ONVIF_URL`, `ONVIF_Username`, `ONVIF_Password`, `ONVIF_Options`,"
"`ONVIF_URL`, `ONVIF_Username`, `ONVIF_Password`, `ONVIF_Options`, `ONVIF_Event_Listener`, "
"SignalCheckPoints, SignalCheckColour, Importance-1 FROM Monitors";
*/
@ -486,7 +498,7 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
} else if ( ! strcmp(dbrow[col], "Libvlc") ) {
type = LIBVLC;
} else if ( ! strcmp(dbrow[col], "cURL") ) {
type = CURL;
type = LIBCURL;
} else if ( ! strcmp(dbrow[col], "VNC") ) {
type = VNC;
} else {
@ -635,8 +647,7 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
onvif_username = std::string(dbrow[col] ? dbrow[col] : ""); col++;
onvif_password = std::string(dbrow[col] ? dbrow[col] : ""); col++;
onvif_options = std::string(dbrow[col] ? dbrow[col] : ""); col++;
importance = dbrow[col] ? atoi(dbrow[col]) : 0;// col++;
onvif_event_listener = (*dbrow[col] != '0'); col++;
/*"SignalCheckPoints, SignalCheckColour, Importance-1 FROM Monitors"; */
signal_check_points = atoi(dbrow[col]); col++;
@ -649,6 +660,7 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
blue_val = BLUE_VAL_BGRA(signal_check_colour);
grayscale_val = signal_check_colour & 0xff; /* Clear all bytes but lowest byte */
importance = dbrow[col] ? atoi(dbrow[col]) : 0;// col++;
if (importance < 0) importance = 0; // Should only be >= 0
// How many frames we need to have before we start analysing
@ -670,8 +682,9 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
mem_size = sizeof(SharedData)
+ sizeof(TriggerData)
+ sizeof(VideoStoreData) //Information to pass back to the capture process
+ (image_buffer_count * sizeof(struct timeval))
+ (image_buffer_count * image_size)
+ (image_buffer_count*sizeof(struct timeval))
+ (image_buffer_count*image_size)
+ image_size // alarm_image
+ 64; /* Padding used to permit aligning the images buffer to 64 byte boundary */
Debug(1,
@ -860,7 +873,7 @@ void Monitor::LoadCamera() {
#endif // HAVE_LIBVLC
break;
}
case CURL: {
case LIBCURL: {
#if HAVE_LIBCURL
camera = zm::make_unique<cURLCamera>(this,
path.c_str(),
@ -1027,6 +1040,12 @@ bool Monitor::connect() {
image_buffer[i] = new Image(width, height, camera->Colours(), camera->SubpixelOrder(), &(shared_images[i*camera->ImageSize()]));
image_buffer[i]->HoldBuffer(true); /* Don't release the internal buffer or replace it with another */
}
alarm_image.AssignDirect(width, height, camera->Colours(), camera->SubpixelOrder(),
&(shared_images[image_buffer_count*camera->ImageSize()]),
camera->ImageSize(),
ZM_BUFTYPE_DONTFREE
);
alarm_image.HoldBuffer(true); /* Don't release the internal buffer or replace it with another */
Debug(3, "Allocated %zu %zu image buffers", image_buffer.capacity(), image_buffer.size());
if (purpose == CAPTURE) {
@ -1068,6 +1087,45 @@ bool Monitor::connect() {
video_store_data->size = sizeof(VideoStoreData);
usedsubpixorder = camera->SubpixelOrder(); // Used in CheckSignal
shared_data->valid = true;
//ONVIF Setup
#ifdef WITH_GSOAP
ONVIF_Trigger_State = FALSE;
if (onvif_event_listener) { //Temporarily using this option to enable the feature
Debug(1, "Starting ONVIF");
ONVIF_Healthy = FALSE;
if (onvif_options.find("closes_event") != std::string::npos) { //Option to indicate that ONVIF will send a close event message
ONVIF_Closes_Event = TRUE;
}
tev__PullMessages.Timeout = "PT600S";
tev__PullMessages.MessageLimit = 100;
soap = soap_new();
soap->connect_timeout = 5;
soap->recv_timeout = 5;
soap->send_timeout = 5;
soap_register_plugin(soap, soap_wsse);
proxyEvent = PullPointSubscriptionBindingProxy(soap);
std::string full_url = onvif_url + "/Events";
proxyEvent.soap_endpoint = full_url.c_str();
set_credentials(soap);
Debug(1, "ONVIF Endpoint: %s", proxyEvent.soap_endpoint);
if (proxyEvent.CreatePullPointSubscription(&request, response) != SOAP_OK) {
Warning("Couldn't create subscription!");
} else {
//Empty the stored messages
set_credentials(soap);
if (proxyEvent.PullMessages(response.SubscriptionReference.Address, NULL, &tev__PullMessages, tev__PullMessagesResponse) != SOAP_OK) {
Warning("Couldn't do initial event pull! %s", response.SubscriptionReference.Address);
} else {
Debug(1, "Good Initial ONVIF Pull");
ONVIF_Healthy = TRUE;
}
}
} else {
Debug(1, "Not Starting ONVIF");
}
//End ONVIF Setup
#endif
} else if (!shared_data->valid) {
Error("Shared data not initialised by capture daemon for monitor %s", name.c_str());
return false;
@ -1186,6 +1244,10 @@ void Monitor::AddPrivacyBitmask() {
privacy_bitmask = privacy_image->Buffer();
}
Image *Monitor::GetAlarmImage() {
return &alarm_image;
}
int Monitor::GetImage(int32_t index, int scale) {
if (index < 0 || index > image_buffer_count) {
Debug(1, "Invalid index %d passed. image_buffer_count = %d", index, image_buffer_count);
@ -1200,26 +1262,23 @@ int Monitor::GetImage(int32_t index, int scale) {
return 0;
}
Image *image;
std::string filename = stringtf("Monitor%u.jpg", id);
// If we are going to be modifying the snapshot before writing, then we need to copy it
if ((scale != ZM_SCALE_BASE) || (!config.timestamp_on_capture)) {
alarm_image.Assign(*image_buffer[index]);
Image image;
image.Assign(*image_buffer[index]);
if (scale != ZM_SCALE_BASE) {
alarm_image.Scale(scale);
image.Scale(scale);
}
if (!config.timestamp_on_capture) {
TimestampImage(&alarm_image, SystemTimePoint(zm::chrono::duration_cast<Microseconds>(shared_timestamps[index])));
TimestampImage(&image, SystemTimePoint(zm::chrono::duration_cast<Microseconds>(shared_timestamps[index])));
}
image = &alarm_image;
return image.WriteJpeg(filename);
} else {
image = image_buffer[index];
return image_buffer[index]->WriteJpeg(filename);
}
std::string filename = stringtf("Monitor%u.jpg", id);
image->WriteJpeg(filename);
return 1;
}
ZMPacket *Monitor::getSnapshot(int index) const {
@ -1727,6 +1786,52 @@ void Monitor::UpdateFPS() {
} // end if report fps
} // void Monitor::UpdateFPS()
//Thread where ONVIF polling, and other similar status polling, can happen.
//Since these calls can block, run them here to avoid interfering with other processing
bool Monitor::Poll() {
#ifdef WITH_GSOAP
if (ONVIF_Healthy) {
set_credentials(soap);
int result = proxyEvent.PullMessages(response.SubscriptionReference.Address, NULL, &tev__PullMessages, tev__PullMessagesResponse);
if (result != SOAP_OK) {
if (result != -1) //Ignore the timeout error
Warning("Failed to get ONVIF messages! %i", result);
} else {
Debug(1, "Got Good Response! %i", result);
for (auto msg : tev__PullMessagesResponse.wsnt__NotificationMessage) {
if (msg->Topic->__any.text != NULL &&
std::strstr(msg->Topic->__any.text, "MotionAlarm") &&
msg->Message.__any.elts != NULL &&
msg->Message.__any.elts->next != NULL &&
msg->Message.__any.elts->next->elts != NULL &&
msg->Message.__any.elts->next->elts->atts != NULL &&
msg->Message.__any.elts->next->elts->atts->next != NULL &&
msg->Message.__any.elts->next->elts->atts->next->text != NULL) {
Debug(1,"Got Motion Alarm!");
if (strcmp(msg->Message.__any.elts->next->elts->atts->next->text, "true") == 0) {
//Event Start
Debug(1,"Triggered on ONVIF");
if (!ONVIF_Trigger_State) {
Debug(1,"Triggered Event");
ONVIF_Trigger_State = TRUE;
}
} else {
Debug(1, "Triggered off ONVIF");
ONVIF_Trigger_State = FALSE;
if (!ONVIF_Closes_Event) { //If we get a close event, then we know to expect them.
ONVIF_Closes_Event = TRUE;
Debug(1,"Setting ClosesEvent");
}
}
}
}
}
}
#endif
return TRUE;
} //end Poll
// Would be nice if this JUST did analysis
// The idea is that we should be analysing as close to the capture frame as possible.
// This method should process as much as possible before returning
@ -1774,6 +1879,22 @@ bool Monitor::Analyse() {
std::string cause;
Event::StringSetMap noteSetMap;
#ifdef WITH_GSOAP
if (ONVIF_Trigger_State) {
score += 9;
Debug(1, "Triggered on ONVIF");
if (!event) {
cause += "ONVIF";
}
Event::StringSet noteSet;
noteSet.insert("ONVIF2");
noteSetMap[MOTION_CAUSE] = noteSet;
//If the camera isn't going to send an event close, we need to close it here, but only after it has actually triggered an alarm.
if (!ONVIF_Closes_Event && state == ALARM)
ONVIF_Trigger_State = FALSE;
} // end ONVIF_Trigger
#endif
// Specifically told to be on. Setting the score here will trigger the alarm.
if (trigger_data->trigger_state == TriggerState::TRIGGER_ON) {
score += trigger_data->trigger_score;
@ -1874,6 +1995,8 @@ bool Monitor::Analyse() {
}
if (snap->image) {
alarm_image.Assign(*(snap->image));
// decoder may not have been able to provide an image
if (!ref_image.Buffer()) {
Debug(1, "Assigning instead of Detecting");
@ -2057,7 +2180,7 @@ bool Monitor::Analyse() {
if (state == PREALARM) {
// Generate analysis images if necessary
if ((savejpegs > 1) and snap->image) {
if (snap->image) {
for (const Zone &zone : zones) {
if (zone.Alarmed() and zone.AlarmImage()) {
if (!snap->analysis_image)
@ -2065,20 +2188,25 @@ bool Monitor::Analyse() {
snap->analysis_image->Overlay(*(zone.AlarmImage()));
} // end if zone is alarmed
} // end foreach zone
} // end if savejpegs
if (snap->analysis_image != nullptr)
alarm_image.Assign(*(snap->analysis_image));
} // end if image.
// increment pre-alarm image count
Event::AddPreAlarmFrame(snap->image, timestamp, score, nullptr);
} else if (state == ALARM) {
for (const Zone &zone : zones) {
if (zone.Alarmed()) {
if (zone.AlarmImage() and (savejpegs > 1) and snap->image) {
if (snap->image) {
for (const Zone &zone : zones) {
if (zone.Alarmed() and zone.AlarmImage()) {
if (!snap->analysis_image)
snap->analysis_image = new Image(*(snap->image));
snap->analysis_image->Overlay(*(zone.AlarmImage()));
}
} // end if zone is alarmed
} // end foreach zone
} // end if zone is alarmed
} // end foreach zone
if (snap->analysis_image != nullptr)
alarm_image.Assign(*(snap->analysis_image));
}
if (event) {
if (noteSetMap.size() > 0)
event->updateNotes(noteSetMap);
@ -2557,23 +2685,21 @@ bool Monitor::Decode() {
} else if (deinterlacing_value == 3) {
capture_image->Deinterlace_Blend();
} else if (deinterlacing_value == 4) {
ZMLockedPacket *deinterlace_packet_lock = nullptr;
while (!zm_terminate) {
ZMLockedPacket *second_packet_lock = packetqueue.get_packet(decoder_it);
if (!second_packet_lock) {
ZMLockedPacket *deinterlace_packet_lock = packetqueue.get_packet(decoder_it);
if (!deinterlace_packet_lock) {
packetqueue.unlock(packet_lock);
return false;
}
if (second_packet_lock->packet_->codec_type == packet->codec_type) {
deinterlace_packet_lock = second_packet_lock;
if (deinterlace_packet_lock->packet_->codec_type == packet->codec_type) {
capture_image->Deinterlace_4Field(deinterlace_packet_lock->packet_->image, (deinterlacing>>8)&0xff);
packetqueue.unlock(deinterlace_packet_lock);
break;
}
packetqueue.unlock(second_packet_lock);
packetqueue.unlock(deinterlace_packet_lock);
packetqueue.increment_it(decoder_it);
}
if (zm_terminate) return false;
capture_image->Deinterlace_4Field(deinterlace_packet_lock->packet_->image, (deinterlacing>>8)&0xff);
packetqueue.unlock(deinterlace_packet_lock);
} else if (deinterlacing_value == 5) {
capture_image->Deinterlace_Blend_CustomRatio((deinterlacing>>8)&0xff);
}
@ -2668,7 +2794,7 @@ void Monitor::TimestampImage(Image *ts_image, SystemTimePoint ts_time) const {
Event * Monitor::openEvent(
const std::shared_ptr<ZMPacket> &snap,
const std::string &cause,
const Event::StringSetMap noteSetMap) {
const Event::StringSetMap &noteSetMap) {
// FIXME this iterator is not protected from invalidation
packetqueue_iterator *start_it = packetqueue.get_event_start_packet_it(
@ -3005,6 +3131,16 @@ int Monitor::PrimeCapture() {
}
} // end if rtsp_server
#ifdef WITH_GSOAP //For now, just don't run the thread if no ONVIF support. This may change if we add other long polling options.
//ONVIF Thread
if (onvif_event_listener) {
if (!Poller) {
Poller = zm::make_unique<PollThread>(this);
} else {
Poller->Start();
}
}
#endif
if (decoding_enabled) {
if (!decoder_it) decoder_it = packetqueue.get_video_it(false);
if (!decoder) {
@ -3042,6 +3178,24 @@ int Monitor::Close() {
if (analysis_thread) {
analysis_thread->Stop();
}
#ifdef WITH_GSOAP
//ONVIF Teardown
if (Poller) {
Poller->Stop();
}
if (onvif_event_listener && (soap != nullptr)) {
Debug(1, "Tearing Down Onvif");
_wsnt__Unsubscribe wsnt__Unsubscribe;
_wsnt__UnsubscribeResponse wsnt__UnsubscribeResponse;
proxyEvent.Unsubscribe(response.SubscriptionReference.Address, NULL, &wsnt__Unsubscribe, wsnt__UnsubscribeResponse);
soap_destroy(soap);
soap_end(soap);
soap_free(soap);
soap = nullptr;
} //End ONVIF
#endif
packetqueue.clear();
if (audio_fifo) {
delete audio_fifo;
@ -3144,3 +3298,36 @@ StringVector Monitor::GroupNames() {
}
return groupnames;
} // end Monitor::GroupNames()
#ifdef WITH_GSOAP
//ONVIF Set Credentials
void Monitor::set_credentials(struct soap *soap)
{
soap_wsse_delete_Security(soap);
soap_wsse_add_Timestamp(soap, NULL, 10);
soap_wsse_add_UsernameTokenDigest(soap, "Auth", onvif_username.c_str(), onvif_password.c_str());
}
//GSOAP boilerplate
int SOAP_ENV__Fault(struct soap *soap, char *faultcode, char *faultstring, char *faultactor, struct SOAP_ENV__Detail *detail, struct SOAP_ENV__Code *SOAP_ENV__Code, struct SOAP_ENV__Reason *SOAP_ENV__Reason, char *SOAP_ENV__Node, char *SOAP_ENV__Role, struct SOAP_ENV__Detail *SOAP_ENV__Detail)
{
// populate the fault struct from the operation arguments to print it
soap_fault(soap);
// SOAP 1.1
soap->fault->faultcode = faultcode;
soap->fault->faultstring = faultstring;
soap->fault->faultactor = faultactor;
soap->fault->detail = detail;
// SOAP 1.2
soap->fault->SOAP_ENV__Code = SOAP_ENV__Code;
soap->fault->SOAP_ENV__Reason = SOAP_ENV__Reason;
soap->fault->SOAP_ENV__Node = SOAP_ENV__Node;
soap->fault->SOAP_ENV__Role = SOAP_ENV__Role;
soap->fault->SOAP_ENV__Detail = SOAP_ENV__Detail;
// set error
soap->error = SOAP_FAULT;
// handle or display the fault here with soap_stream_fault(soap, std::cerr);
// return HTTP 202 Accepted
return soap_send_empty_response(soap, SOAP_OK);
}
#endif

View File

@ -23,6 +23,7 @@
#include "zm_define.h"
#include "zm_camera.h"
#include "zm_analysis_thread.h"
#include "zm_poll_thread.h"
#include "zm_decoder_thread.h"
#include "zm_event.h"
#include "zm_fifo.h"
@ -34,6 +35,12 @@
#include <sys/time.h>
#include <vector>
#ifdef WITH_GSOAP
#include "soapPullPointSubscriptionBindingProxy.h"
#include "plugin/wsseapi.h"
#include <openssl/err.h>
#endif
class Group;
#define SIGNAL_CAUSE "Signal"
@ -97,7 +104,7 @@ public:
FILE,
FFMPEG,
LIBVLC,
CURL,
LIBCURL,
NVSOCKET,
VNC,
} CameraType;
@ -280,6 +287,8 @@ protected:
};
protected:
// These are read from the DB and thereafter remain unchanged
unsigned int id;
std::string name;
@ -310,6 +319,7 @@ protected:
std::string onvif_username;
std::string onvif_password;
std::string onvif_options;
bool onvif_event_listener;
std::string device;
int palette;
@ -433,6 +443,7 @@ protected:
VideoStore *videoStore;
PacketQueue packetqueue;
std::unique_ptr<PollThread> Poller;
packetqueue_iterator *analysis_it;
std::unique_ptr<AnalysisThread> analysis_thread;
packetqueue_iterator *decoder_it;
@ -459,6 +470,20 @@ protected:
std::string diag_path_ref;
std::string diag_path_delta;
//ONVIF
#ifdef WITH_GSOAP
struct soap *soap;
bool ONVIF_Trigger_State;
bool ONVIF_Healthy;
bool ONVIF_Closes_Event;
_tev__CreatePullPointSubscription request;
_tev__CreatePullPointSubscriptionResponse response;
_tev__PullMessages tev__PullMessages;
_tev__PullMessagesResponse tev__PullMessagesResponse;
PullPointSubscriptionBindingProxy proxyEvent;
void set_credentials(struct soap *soap);
#endif
// Used in check signal
uint8_t red_val;
uint8_t green_val;
@ -618,6 +643,7 @@ public:
const std::string &getONVIF_Password() const { return onvif_password; };
const std::string &getONVIF_Options() const { return onvif_options; };
Image *GetAlarmImage();
int GetImage(int32_t index=-1, int scale=100);
ZMPacket *getSnapshot( int index=-1 ) const;
SystemTimePoint GetTimestamp(int index = -1) const;
@ -674,12 +700,13 @@ public:
bool CheckSignal( const Image *image );
bool Analyse();
bool Decode();
bool Poll();
void DumpImage( Image *dump_image ) const;
void TimestampImage(Image *ts_image, SystemTimePoint ts_time) const;
Event *openEvent(
const std::shared_ptr<ZMPacket> &snap,
const std::string &cause,
const Event::StringSetMap noteSetMap);
const Event::StringSetMap &noteSetMap);
void closeEvent();
void Reload();

View File

@ -243,6 +243,14 @@ void MonitorStream::processCommand(const CmdMsg *msg) {
Info("User initiated exit - CMD_QUIT");
zm_terminate = true;
break;
case CMD_ANALYZE_ON :
frame_type = FRAME_ANALYSIS;
Debug(1, "ANALYSIS on");
break;
case CMD_ANALYZE_OFF :
frame_type = FRAME_NORMAL;
Debug(1, "ANALYSIS off");
break;
case CMD_QUERY :
Debug(1, "Got QUERY command, sending STATUS");
break;
@ -720,9 +728,22 @@ void MonitorStream::runStream() {
// Perhaps we should use NOW instead.
last_frame_timestamp =
SystemTimePoint(zm::chrono::duration_cast<Microseconds>(monitor->shared_timestamps[index]));
Image *image = monitor->image_buffer[index];
if (!sendFrame(image, last_frame_timestamp)) {
Image *send_image = nullptr;
if ((frame_type == FRAME_ANALYSIS) &&
(monitor->GetFunction() == Monitor::MOCORD || monitor->GetFunction() == Monitor::MODECT)) {
Debug(1, "Sending analysis image");
send_image = monitor->GetAlarmImage();
if ( !send_image ) {
Debug(1, "Falling back");
send_image = monitor->image_buffer[index];
}
} else {
Debug(1, "Sending regular image");
send_image = monitor->image_buffer[index];
}
if (!sendFrame(send_image, last_frame_timestamp)) {
Debug(2, "sendFrame failed, quiting.");
zm_terminate = true;
break;
@ -731,7 +752,7 @@ void MonitorStream::runStream() {
if (frame_count == 0) {
// Chrome will not display the first frame until it receives another.
// Firefox is fine. So just send the first frame twice.
if (!sendFrame(image, last_frame_timestamp)) {
if (!sendFrame(send_image, last_frame_timestamp)) {
Debug(2, "sendFrame failed, quiting.");
zm_terminate = true;
break;
@ -753,7 +774,7 @@ void MonitorStream::runStream() {
frame_count++;
frame_count++;
} else {
SystemTimePoint::duration actual_delta_time = now - last_frame_sent;
TimePoint::duration actual_delta_time = now - last_frame_sent;
if (actual_delta_time > Seconds(5)) {
if (paused_image) {
// Send keepalive

View File

@ -1,17 +1,17 @@
/*
* ZoneMinder MPEG class implementation, $Date$, $Revision$
* Copyright (C) 2001-2008 Philip Coombes
*
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
@ -42,19 +42,26 @@ void VideoStream::SetupFormat( ) {
ofc = nullptr;
avformat_alloc_output_context2(&ofc, nullptr, format, filename);
if ( !ofc ) {
if (!ofc) {
Fatal("avformat_alloc_..._context failed");
}
of = ofc->oformat;
Debug(1, "Using output format: %s (%s)", of->name, of->long_name);
Debug(1, "Using output format: %s (%s)", of->name, of->long_name);
}
void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int height, int bitrate, double frame_rate ) {
int VideoStream::SetupCodec(
int colours,
int subpixelorder,
int width,
int height,
int bitrate,
double frame_rate
) {
/* ffmpeg format matching */
switch ( colours ) {
switch (colours) {
case ZM_COLOUR_RGB24:
if ( subpixelorder == ZM_SUBPIX_ORDER_BGR ) {
if (subpixelorder == ZM_SUBPIX_ORDER_BGR) {
/* BGR subpixel order */
pf = AV_PIX_FMT_BGR24;
} else {
@ -63,13 +70,13 @@ void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int hei
}
break;
case ZM_COLOUR_RGB32:
if ( subpixelorder == ZM_SUBPIX_ORDER_ARGB ) {
if (subpixelorder == ZM_SUBPIX_ORDER_ARGB) {
/* ARGB subpixel order */
pf = AV_PIX_FMT_ARGB;
} else if ( subpixelorder == ZM_SUBPIX_ORDER_ABGR ) {
} else if (subpixelorder == ZM_SUBPIX_ORDER_ABGR) {
/* ABGR subpixel order */
pf = AV_PIX_FMT_ABGR;
} else if ( subpixelorder == ZM_SUBPIX_ORDER_BGRA ) {
} else if (subpixelorder == ZM_SUBPIX_ORDER_BGRA) {
/* BGRA subpixel order */
pf = AV_PIX_FMT_BGRA;
} else {
@ -85,22 +92,22 @@ void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int hei
break;
}
if ( strcmp("rtp", of->name) == 0 ) {
if (strcmp("rtp", of->name) == 0) {
// RTP must have a packet_size.
// Not sure what this value should be really...
ofc->packet_size = width*height;
Debug(1,"Setting packet_size to %d", ofc->packet_size);
if ( of->video_codec == AV_CODEC_ID_NONE ) {
if (of->video_codec == AV_CODEC_ID_NONE) {
// RTP does not have a default codec in ffmpeg <= 0.8.
of->video_codec = AV_CODEC_ID_MPEG4;
}
}
_AVCODECID codec_id = of->video_codec;
if ( codec_name ) {
if (codec_name) {
AVCodec *a = avcodec_find_encoder_by_name(codec_name);
if ( a ) {
if (a) {
codec_id = a->id;
Debug(1, "Using codec \"%s\"", codec_name);
} else {
@ -111,31 +118,29 @@ void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int hei
/* add the video streams using the default format codecs
and initialize the codecs */
ost = nullptr;
if ( codec_id != AV_CODEC_ID_NONE ) {
if (codec_id != AV_CODEC_ID_NONE) {
codec = avcodec_find_encoder(codec_id);
if ( !codec ) {
Fatal("Could not find encoder for '%s'", avcodec_get_name(codec_id));
if (!codec) {
Error("Could not find encoder for '%s'", avcodec_get_name(codec_id));
return -1;
}
Debug(1, "Found encoder for '%s'", avcodec_get_name(codec_id));
ost = avformat_new_stream( ofc, codec );
if ( !ost ) {
Fatal( "Could not alloc stream" );
return;
ost = avformat_new_stream(ofc, codec);
if (!ost) {
Error("Could not alloc stream");
return -1;
}
Debug( 1, "Allocated stream (%d) !=? (%d)", ost->id , ofc->nb_streams - 1 );
Debug(1, "Allocated stream (%d) !=? (%d)", ost->id , ofc->nb_streams - 1);
ost->id = ofc->nb_streams - 1;
codec_context = avcodec_alloc_context3(nullptr);
//avcodec_parameters_to_context(codec_context, ost->codecpar);
codec_context->codec_id = codec->id;
codec_context->codec_type = codec->type;
codec_context->pix_fmt = strcmp("mjpeg", ofc->oformat->name) == 0 ? AV_PIX_FMT_YUVJ422P : AV_PIX_FMT_YUV420P;
if ( bitrate <= 100 ) {
if (bitrate <= 100) {
// Quality based bitrate control (VBR). Scale is 1..31 where 1 is best.
// This gets rid of artifacts at the beginning of the movie and gives even quality.
codec_context->flags |= AV_CODEC_FLAG_QSCALE;
@ -155,22 +160,22 @@ void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int hei
codec_context->time_base.num = 1;
ost->time_base.den = frame_rate;
ost->time_base.num = 1;
Debug( 1, "Will encode in %d fps. %dx%d", codec_context->time_base.den, width, height );
/* emit one intra frame every second */
codec_context->gop_size = frame_rate;
// some formats want stream headers to be separate
if ( of->flags & AVFMT_GLOBALHEADER )
if (of->flags & AVFMT_GLOBALHEADER)
codec_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
avcodec_parameters_from_context(ost->codecpar, codec_context);
zm_dump_codecpar(ost->codecpar);
} else {
Fatal( "of->video_codec == AV_CODEC_ID_NONE" );
}
Error("of->video_codec == AV_CODEC_ID_NONE");
return -1;
}
return 0;
}
void VideoStream::SetParameters( ) {
@ -198,11 +203,11 @@ const char *VideoStream::MimeType() const {
bool VideoStream::OpenStream( ) {
int ret;
/* now that all the parameters are set, we can open the
/* now that all the parameters are set, we can open the
video codecs and allocate the necessary encode buffers */
if ( ost ) {
Debug(1,"Opening codec");
/* open the codec */
if ((ret = avcodec_open2(codec_context, codec, nullptr)) < 0) {
@ -319,7 +324,7 @@ VideoStream::VideoStream( const char *in_filename, const char *in_format, int bi
if ( !initialised ) {
Initialise( );
}
if ( format ) {
int length = strlen(format);
codec_and_format = new char[length+1];;
@ -337,13 +342,13 @@ VideoStream::VideoStream( const char *in_filename, const char *in_format, int bi
SetupFormat( );
SetupCodec( colours, subpixelorder, width, height, bitrate, frame_rate );
SetParameters( );
// Allocate buffered packets.
packet_buffers = new AVPacket*[2];
packet_buffers[0] = new AVPacket();
packet_buffers[1] = new AVPacket();
packet_index = 0;
// Initialize mutex used by streaming thread.
if ( pthread_mutex_init( buffer_copy_lock, nullptr ) != 0 ) {
Fatal("pthread_mutex_init failed");
@ -353,35 +358,35 @@ VideoStream::VideoStream( const char *in_filename, const char *in_format, int bi
VideoStream::~VideoStream( ) {
Debug( 1, "VideoStream destructor." );
// Stop streaming thread.
if ( streaming_thread ) {
do_streaming = false;
void* thread_exit_code;
Debug( 1, "Asking streaming thread to exit." );
// Wait for thread to exit.
pthread_join(streaming_thread, &thread_exit_code);
}
if ( buffer_copy != nullptr ) {
av_free( buffer_copy );
}
if ( buffer_copy_lock ) {
if ( pthread_mutex_destroy( buffer_copy_lock ) != 0 ) {
Error( "pthread_mutex_destroy failed" );
}
delete buffer_copy_lock;
}
if (packet_buffers) {
delete packet_buffers[0];
delete packet_buffers[1];
delete[] packet_buffers;
}
/* close each codec */
if ( ost ) {
avcodec_close( codec_context );
@ -409,7 +414,7 @@ VideoStream::~VideoStream( ) {
/* free the stream */
av_free( ofc );
/* free format and codec_name data. */
if ( codec_and_format ) {
delete codec_and_format;
@ -420,12 +425,12 @@ double VideoStream::EncodeFrame( const uint8_t *buffer, int buffer_size, bool _a
if ( pthread_mutex_lock(buffer_copy_lock) != 0 ) {
Fatal( "EncodeFrame: pthread_mutex_lock failed." );
}
if (buffer_copy_size < buffer_size) {
if ( buffer_copy ) {
av_free(buffer_copy);
}
// Allocate a buffer to store source images for the streaming thread to encode.
buffer_copy = (uint8_t *)av_malloc(buffer_size);
if ( !buffer_copy ) {
@ -435,35 +440,34 @@ double VideoStream::EncodeFrame( const uint8_t *buffer, int buffer_size, bool _a
}
buffer_copy_size = buffer_size;
}
add_timestamp = _add_timestamp;
timestamp = _timestamp;
buffer_copy_used = buffer_size;
memcpy(buffer_copy, buffer, buffer_size);
if ( pthread_mutex_unlock(buffer_copy_lock) != 0 ) {
Fatal( "EncodeFrame: pthread_mutex_unlock failed." );
}
if ( streaming_thread == 0 ) {
Debug( 1, "Starting streaming thread" );
// Start a thread for streaming encoded video.
if (pthread_create( &streaming_thread, nullptr, StreamingThreadCallback, (void*) this) != 0){
// Log a fatal error and exit the process.
Fatal( "VideoStream failed to create streaming thread." );
}
}
//return ActuallyEncodeFrame( buffer, buffer_size, add_timestamp, timestamp);
return _timestamp;
}
double VideoStream::ActuallyEncodeFrame( const uint8_t *buffer, int buffer_size, bool add_timestamp, unsigned int timestamp ) {
if ( codec_context->pix_fmt != pf ) {
static struct SwsContext *img_convert_ctx = nullptr;
static struct SwsContext *img_convert_ctx = nullptr;
memcpy( tmp_opicture->data[0], buffer, buffer_size );
if ( !img_convert_ctx ) {
img_convert_ctx = sws_getCachedContext( nullptr, codec_context->width, codec_context->height, pf, codec_context->width, codec_context->height, codec_context->pix_fmt, SWS_BICUBIC, nullptr, nullptr, nullptr );
@ -475,37 +479,36 @@ double VideoStream::ActuallyEncodeFrame( const uint8_t *buffer, int buffer_size,
memcpy( opicture->data[0], buffer, buffer_size );
}
AVFrame *opicture_ptr = opicture;
AVPacket *pkt = packet_buffers[packet_index];
av_init_packet( pkt );
int got_packet = 0;
if (codec_context->codec_type == AVMEDIA_TYPE_VIDEO &&
codec_context->codec_id == AV_CODEC_ID_RAWVIDEO) {
pkt->flags |= AV_PKT_FLAG_KEY;
pkt->stream_index = ost->index;
pkt->data = (uint8_t *)opicture_ptr;
pkt->size = sizeof (AVPicture);
got_packet = 1;
int got_packet = 0;
if (codec_context->codec_type == AVMEDIA_TYPE_VIDEO &&
codec_context->codec_id == AV_CODEC_ID_RAWVIDEO) {
pkt->flags |= AV_PKT_FLAG_KEY;
pkt->stream_index = ost->index;
pkt->data = (uint8_t *)opicture_ptr;
pkt->size = sizeof (AVPicture);
got_packet = 1;
} else {
opicture_ptr->pts = codec_context->frame_number;
opicture_ptr->quality = codec_context->global_quality;
avcodec_send_frame(codec_context, opicture_ptr);
int ret = avcodec_receive_packet(codec_context, pkt);
if ( ret < 0 ) {
if ( AVERROR_EOF != ret ) {
Error("ERror encoding video (%d) (%s)", ret,
av_err2str(ret));
}
} else {
got_packet = 1;
avcodec_send_frame(codec_context, opicture_ptr);
int ret = avcodec_receive_packet(codec_context, pkt);
if (ret < 0) {
if (AVERROR_EOF != ret) {
Error("ERror encoding video (%d) (%s)", ret, av_err2str(ret));
}
} else {
got_packet = 1;
}
if ( got_packet ) {
// if ( c->coded_frame->key_frame )
// {
// pkt->flags |= AV_PKT_FLAG_KEY;
// }
if (got_packet) {
// if ( c->coded_frame->key_frame )
// {
// pkt->flags |= AV_PKT_FLAG_KEY;
// }
if ( pkt->pts != (int64_t)AV_NOPTS_VALUE ) {
pkt->pts = av_rescale_q( pkt->pts, codec_context->time_base, ost->time_base );
@ -517,18 +520,17 @@ double VideoStream::ActuallyEncodeFrame( const uint8_t *buffer, int buffer_size,
pkt->stream_index = ost->index;
}
}
return ( opicture_ptr->pts);
return opicture_ptr->pts;
}
int VideoStream::SendPacket(AVPacket *packet) {
int ret = av_write_frame( ofc, packet );
if ( ret != 0 ) {
Fatal( "Error %d while writing video frame: %s", ret, av_err2str( errno ) );
}
av_packet_unref( packet );
return ret;
int ret = av_write_frame(ofc, packet);
if (ret < 0) {
Error("Error %d while writing video frame: %s", ret, av_err2str(errno));
}
av_packet_unref(packet);
return ret;
}
void *VideoStream::StreamingThreadCallback(void *ctx) {

View File

@ -68,7 +68,7 @@ protected:
static void Initialise();
void SetupFormat( );
void SetupCodec( int colours, int subpixelorder, int width, int height, int bitrate, double frame_rate );
int SetupCodec( int colours, int subpixelorder, int width, int height, int bitrate, double frame_rate );
void SetParameters();
void ActuallyOpenStream();
double ActuallyEncodeFrame( const uint8_t *buffer, int buffer_size, bool add_timestamp=false, unsigned int timestamp=0 );

src/zm_poll_thread.cpp (new file, 32 lines)
View File

@ -0,0 +1,32 @@
#include "zm_poll_thread.h"
#include "zm_monitor.h"
#include "zm_signal.h"
#include "zm_time.h"
PollThread::PollThread(Monitor *monitor) :
monitor_(monitor), terminate_(false) {
thread_ = std::thread(&PollThread::Run, this);
}
PollThread::~PollThread() {
Stop();
}
void PollThread::Start() {
if (thread_.joinable()) thread_.join();
terminate_ = false;
Debug(3, "Starting polling thread");
thread_ = std::thread(&PollThread::Run, this);
}
void PollThread::Stop() {
terminate_ = true;
if (thread_.joinable()) {
thread_.join();
}
}
void PollThread::Run() {
while (!(terminate_ or zm_terminate)) {
monitor_->Poll();
}
}
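
A condensed sketch of how this commit wires the thread up from Monitor::PrimeCapture and Monitor::Close (`mon` is a placeholder for the owning Monitor*):

std::unique_ptr<PollThread> Poller;           // member on Monitor
if (!Poller) {
  Poller = zm::make_unique<PollThread>(mon);  // constructor spawns Run()
} else {
  Poller->Start();                            // join any old thread, spawn anew
}
// Run() loops calling mon->Poll(), which blocks in PullMessages with a 5 s
// soap recv_timeout, so the loop does not busy-spin.
Poller->Stop();                               // sets terminate_ and joins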

src/zm_poll_thread.h (new file, 29 lines)
View File

@ -0,0 +1,29 @@
#ifndef ZM_POLL_THREAD_H
#define ZM_POLL_THREAD_H
#include <atomic>
#include <memory>
#include <thread>
class Monitor;
class PollThread {
public:
explicit PollThread(Monitor *monitor);
~PollThread();
PollThread(PollThread &rhs) = delete;
PollThread(PollThread &&rhs) = delete;
void Start();
void Stop();
bool Stopped() const { return terminate_; }
private:
void Run();
Monitor *monitor_;
std::atomic<bool> terminate_;
std::thread thread_;
};
#endif

View File

@ -21,7 +21,7 @@ class ZoneMinderDeviceSource;
class BaseServerMediaSubsession {
public:
BaseServerMediaSubsession(StreamReplicator* replicator):
explicit BaseServerMediaSubsession(StreamReplicator* replicator):
m_replicator(replicator) {};
FramedSource* createSource(

View File

@ -157,7 +157,6 @@ Image *StreamBase::prepareImage(Image *image) {
int disp_image_width = (image->Width() * scale) / ZM_SCALE_BASE, disp_image_height = (image->Height() * scale) / ZM_SCALE_BASE;
int last_disp_image_width = (image->Width() * last_scale) / ZM_SCALE_BASE, last_disp_image_height = (image->Height() * last_scale) / ZM_SCALE_BASE;
int send_image_width = (disp_image_width * act_mag ) / mag, send_image_height = (disp_image_height * act_mag ) / mag;
int last_send_image_width = (last_disp_image_width * last_act_mag ) / last_mag, last_send_image_height = (last_disp_image_height * last_act_mag ) / last_mag;
Debug(3,
"Scaling by %d, zooming by %d = magnifying by %d(%d)\n"
@ -169,8 +168,7 @@ Image *StreamBase::prepareImage(Image *image) {
"Last actual image width = %d, height = %d\n"
"Display image width = %d, height = %d\n"
"Last display image width = %d, height = %d\n"
"Send image width = %d, height = %d\n"
"Last send image width = %d, height = %d\n",
"Send image width = %d, height = %d\n",
scale, zoom, mag, act_mag,
last_scale, last_zoom, last_mag, last_act_mag,
base_image_width, base_image_height,
@ -180,8 +178,7 @@ Image *StreamBase::prepareImage(Image *image) {
last_act_image_width, last_act_image_height,
disp_image_width, disp_image_height,
last_disp_image_width, last_disp_image_height,
send_image_width, send_image_height,
last_send_image_width, last_send_image_height
send_image_width, send_image_height
);
if ( ( mag != ZM_SCALE_BASE ) && (act_mag != ZM_SCALE_BASE) ) {

View File

@ -40,6 +40,7 @@ public:
STREAM_SINGLE,
STREAM_MPEG
} StreamType;
typedef enum { FRAME_NORMAL, FRAME_ANALYSIS } FrameType;
protected:
static constexpr Seconds MAX_STREAM_DELAY = Seconds(5);
@ -89,6 +90,8 @@ protected:
CMD_GET_IMAGE,
CMD_QUIT,
CMD_MAXFPS,
CMD_ANALYZE_ON,
CMD_ANALYZE_OFF,
CMD_QUERY=99
} MsgCommand;
@ -97,6 +100,7 @@ protected:
std::shared_ptr<Monitor> monitor;
StreamType type;
FrameType frame_type;
const char *format;
int replay_rate;
int scale;
@ -153,6 +157,7 @@ public:
monitor_id(0),
monitor(nullptr),
type(DEFAULT_TYPE),
frame_type(FRAME_NORMAL),
format(""),
replay_rate(DEFAULT_RATE),
scale(DEFAULT_SCALE),
@ -198,7 +203,9 @@ public:
type = STREAM_RAW;
}
#endif
}
void setStreamFrameType(FrameType p_type) {
frame_type = p_type;
}
void setStreamFormat(const char *p_format) {
format = p_format;
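The new FrameType enum and setStreamFrameType() pair with the CMD_ANALYZE_ON/OFF commands added above. The actual handler lives in zm_stream.cpp, which this excerpt does not show; a plausible caller-side sketch, with the numeric values mirroring the PHP CMD_ANALYZE_ON/OFF defines added later in this diff:

#include "zm_stream.h"

// Plausible sketch only, not the real handler; function name and the
// literal command values are assumptions.
void applyFrameTypeCommand(StreamBase &stream, int cmd) {
  if (cmd == 19)       // CMD_ANALYZE_ON
    stream.setStreamFrameType(StreamBase::FRAME_ANALYSIS);
  else if (cmd == 20)  // CMD_ANALYZE_OFF
    stream.setStreamFrameType(StreamBase::FRAME_NORMAL);
}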

View File

@ -22,7 +22,14 @@
#include "zm_image.h"
#include "zm_logger.h"
SWScale::SWScale() : gotdefaults(false), swscale_ctx(nullptr), input_avframe(nullptr), output_avframe(nullptr) {
SWScale::SWScale() :
gotdefaults(false),
swscale_ctx(nullptr),
input_avframe(nullptr),
output_avframe(nullptr),
default_width(0),
default_height(0)
{
Debug(4, "SWScale object created");
}

View File

@ -91,7 +91,7 @@ Duration duration_cast(timeval const &tv) {
//
class TimeSegmentAdder {
public:
TimeSegmentAdder(Microseconds &in_target) :
explicit TimeSegmentAdder(Microseconds &in_target) :
target_(in_target),
start_time_(std::chrono::steady_clock::now()),
finished_(false) {

View File

@ -120,9 +120,6 @@ std::string Join(const StringVector &values, const std::string &delim) {
std::string stringtf(const char* format, ...) {
va_list args;
va_start(args, format);
va_list args2;
va_copy(args2, args);
int size = vsnprintf(nullptr, 0, format, args) + 1; // Extra space for '\0'
va_end(args);
@ -131,6 +128,8 @@ std::string stringtf(const char* format, ...) {
}
std::unique_ptr<char[]> buf(new char[size]);
va_list args2;
va_copy(args2, args);
vsnprintf(buf.get(), size, format, args2);
va_end(args2);
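This hunk moves the second va_list below the size probe, so the early-return path no longer leaves a copied list open. One caveat: va_copy from a va_list that has already been passed through va_end is not portable; a minimal sketch of the two-pass vsnprintf pattern using a fresh va_start instead — simplified, not the exact ZoneMinder code:

#include <cstdarg>
#include <cstdio>
#include <memory>
#include <string>

std::string stringtf_sketch(const char *format, ...) {
  va_list args;
  va_start(args, format);
  int size = vsnprintf(nullptr, 0, format, args) + 1;  // probe; +1 for '\0'
  va_end(args);
  if (size <= 0)
    return "";  // the real code logs an error on this path

  std::unique_ptr<char[]> buf(new char[size]);
  va_list args2;
  va_start(args2, format);  // fresh list for the second pass
  vsnprintf(buf.get(), size, format, args2);
  va_end(args2);
  return std::string(buf.get(), buf.get() + size - 1);  // drop the '\0'
}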
@ -259,6 +258,8 @@ void HwCapsDetect() {
unsigned long auxval = 0;
elf_aux_info(AT_HWCAP, &auxval, sizeof(auxval));
if (auxval & HWCAP_NEON) {
#else
{
#error Unsupported OS.
#endif
Debug(1,"Detected ARM (AArch32) processor with Neon");

View File

@ -401,6 +401,10 @@ bool VideoStore::open() {
} else {
audio_in_ctx = avcodec_alloc_context3(audio_out_codec);
ret = avcodec_parameters_to_context(audio_in_ctx, audio_in_stream->codecpar);
if (ret < 0)
Error("Failure from avcodec_parameters_to_context %s",
av_make_error_string(ret).c_str());
audio_in_ctx->time_base = audio_in_stream->time_base;
audio_out_ctx = avcodec_alloc_context3(audio_out_codec);
@ -729,7 +733,6 @@ bool VideoStore::setup_resampler() {
audio_out_ctx->sample_fmt = audio_in_ctx->sample_fmt;
audio_out_ctx->channels = audio_in_ctx->channels;
audio_out_ctx->channel_layout = audio_in_ctx->channel_layout;
audio_out_ctx->sample_fmt = audio_in_ctx->sample_fmt;
if (!audio_out_ctx->channel_layout) {
Debug(3, "Correcting channel layout from (%" PRIi64 ") to (%" PRIi64 ")",
audio_out_ctx->channel_layout,
@ -852,7 +855,7 @@ bool VideoStore::setup_resampler() {
return false;
}
if ((ret = swr_init(resample_ctx)) < 0) {
Error("Could not open resampler");
Error("Could not open resampler %d", ret);
av_frame_free(&in_frame);
av_frame_free(&out_frame);
swr_free(&resample_ctx);

View File

@ -219,7 +219,7 @@ bool Zone::CheckAlarms(const Image *delta_image) {
int alarm_mid_x = -1;
int alarm_mid_y = -1;
unsigned int lo_x = polygon.Extent().Lo().x_;
//unsigned int lo_x = polygon.Extent().Lo().x_;
unsigned int lo_y = polygon.Extent().Lo().y_;
unsigned int hi_x = polygon.Extent().Hi().x_;
unsigned int hi_y = polygon.Extent().Hi().y_;
@ -699,6 +699,7 @@ bool Zone::CheckAlarms(const Image *delta_image) {
if ((type < PRECLUSIVE) && (check_method >= BLOBS) && (monitor->GetOptSaveJPEGs() > 1)) {
unsigned int lo_x = polygon.Extent().Lo().x_;
// First mask out anything we don't want
for (unsigned int y = lo_y; y <= hi_y; y++) {
pdiff = diff_buff + ((diff_width * y) + lo_x);

View File

@ -63,6 +63,7 @@ int main(int argc, const char *argv[], char **envp) {
double maxfps = 10.0;
unsigned int bitrate = 100000;
unsigned int ttl = 0;
bool analysis_frames = false;
EventStream::StreamMode replay = EventStream::MODE_NONE;
std::string username;
std::string password;
@ -115,7 +116,14 @@ int main(int argc, const char *argv[], char **envp) {
char const *value = strtok(nullptr, "=");
if ( !value )
value = "";
if ( !strcmp(name, "source") ) {
if ( !strcmp(name, "analysis") ) {
if ( !strcmp(value, "true") ) {
analysis_frames = true;
} else {
analysis_frames = (atoi(value) == 1);
}
Debug(1, "Viewing analysis frames");
} else if ( !strcmp(name, "source") ) {
if ( !strcmp(value, "event") ) {
source = ZMS_EVENT;
} else if ( !strcmp(value, "fifo") ) {
@ -271,6 +279,7 @@ int main(int argc, const char *argv[], char **envp) {
zmDbClose();
return -1;
}
stream.setStreamFrameType(analysis_frames ? StreamBase::FRAME_ANALYSIS: StreamBase::FRAME_NORMAL);
if ( mode == ZMS_JPEG ) {
stream.setStreamType(MonitorStream::STREAM_JPEG);
@ -307,6 +316,7 @@ int main(int argc, const char *argv[], char **envp) {
Debug(3, "Setting stream start to frame (%d)", frame_id);
stream.setStreamStart(event_id, frame_id);
}
stream.setStreamFrameType(analysis_frames ? StreamBase::FRAME_ANALYSIS: StreamBase::FRAME_NORMAL);
if ( mode == ZMS_JPEG ) {
stream.setStreamType(EventStream::STREAM_JPEG);
} else {

View File

@ -1 +1 @@
1.37.6
1.37.7

View File

@ -9,6 +9,7 @@ $raw = isset($_REQUEST['raw']);
$data = array();
if ($raw) {
$data['raw'] = array();
$sql = 'SELECT S.*,E.*,Z.Name AS ZoneName,Z.Units,Z.Area,M.Name AS MonitorName
FROM Stats AS S LEFT JOIN Events AS E ON S.EventId = E.Id LEFT JOIN Zones AS Z ON S.ZoneId = Z.Id LEFT JOIN Monitors AS M ON E.MonitorId = M.Id
WHERE S.EventId = ? AND S.FrameId = ? ORDER BY S.ZoneId';

View File

@ -124,6 +124,7 @@ class Monitor extends ZM_Object {
'ONVIF_Username' => '',
'ONVIF_Password' => '',
'ONVIF_Options' => '',
'ONVIF_Event_Listener' => '0',
'Device' => '',
'Channel' => 0,
'Format' => '0',

View File

@ -11,19 +11,24 @@ class ZM_Object {
$class = get_class($this);
$row = NULL;
if ( $IdOrRow ) {
if ($IdOrRow) {
if ( is_integer($IdOrRow) or ctype_digit($IdOrRow) ) {
if (is_integer($IdOrRow) or ctype_digit($IdOrRow)) {
$table = $class::$table;
$row = dbFetchOne("SELECT * FROM `$table` WHERE `Id`=?", NULL, array($IdOrRow));
if ( !$row ) {
if (!$row) {
Error("Unable to load $class record for Id=$IdOrRow");
return;
}
} else if ( is_array($IdOrRow) ) {
} else if (is_array($IdOrRow)) {
$row = $IdOrRow;
}
if ( $row ) {
if (!isset($row['Id'])) {
Error("No Id in " . print_r($row, true));
return;
}
foreach ($row as $k => $v) {
$this->{$k} = $v;
}

View File

@ -120,6 +120,8 @@ define('CMD_SEEK', 14 );
define('CMD_VARPLAY', 15);
define('CMD_QUIT', 17);
define('CMD_MAXFPS', 18);
define('CMD_ANALYZE_ON', 19);
define('CMD_ANALYZE_OFF', 20);
define('CMD_QUERY', 99);
//

View File

@ -2,6 +2,8 @@
function MonitorStream(monitorData) {
this.id = monitorData.id;
this.connKey = monitorData.connKey;
this.auth_relay = auth_relay;
this.auth_hash = auth_hash;
this.url = monitorData.url;
this.url_to_zms = monitorData.url_to_zms;
this.width = monitorData.width;
@ -206,36 +208,38 @@ function MonitorStream(monitorData) {
var newAlarm = ( isAlarmed && !wasAlarmed );
var oldAlarm = ( !isAlarmed && wasAlarmed );
if ( newAlarm ) {
if ( false && SOUND_ON_ALARM ) {
if (newAlarm) {
if (false && SOUND_ON_ALARM) {
// Enable the alarm sound
$j('#alarmSound').removeClass('hidden');
}
if ( (typeof POPUP_ON_ALARM !== 'undefined') && POPUP_ON_ALARM ) {
if ((typeof POPUP_ON_ALARM !== 'undefined') && POPUP_ON_ALARM) {
windowToFront();
}
}
if ( false && SOUND_ON_ALARM ) {
if (false && SOUND_ON_ALARM) {
if ( oldAlarm ) {
// Disable alarm sound
$j('#alarmSound').addClass('hidden');
}
}
if ( this.status.auth ) {
if ( this.status.auth != auth_hash ) {
if (this.status.auth) {
if (this.status.auth != auth_hash) {
// Try to reload the image stream.
if ( stream ) {
stream.src = stream.src.replace(/auth=\w+/i, 'auth='+this.status.auth);
if (stream) {
const oldsrc = stream.src;
stream.src = '';
stream.src = oldsrc.replace(/auth=\w+/i, 'auth='+this.status.auth);
}
console.log("Changed auth from " + auth_hash + " to " + this.status.auth);
auth_hash = this.status.auth;
console.log("Changed auth from " + this.auth_hash + " to " + this.status.auth);
this.auth_hash = this.status.auth;
}
} // end if have a new auth hash
} // end if has state
} else {
console.error(respObj.message);
// Try to reload the image stream.
if ( stream ) {
if (stream) {
if ( stream.src ) {
console.log('Reloading stream: ' + stream.src);
src = stream.src.replace(/rand=\d+/i, 'rand='+Math.floor((Math.random() * 1000000) ));
@ -276,4 +280,10 @@ function MonitorStream(monitorData) {
.fail(this.onFailure.bind(this));
};
}
this.analyse_frames = true;
this.show_analyse_frames = function(toggle) {
this.analyse_frames = toggle;
this.streamCmdParms.command = this.analyse_frames?CMD_ANALYZE_ON:CMD_ANALYZE_OFF;
this.streamCmdReq(this.streamCmdParms);
};
} // end function MonitorStream

View File

@ -116,7 +116,10 @@ xhtmlHeaders(__FILE__, translate('Frame').' - '.$Event->Id().' - '.$Frame->Frame
<p id="image">
<?php
if ( $imageData['hasAnalImage'] ) {
echo sprintf('<a href="?view=frame&amp;eid=%d&amp;fid=%d&scale=%d&amp;show=%s">', $Event->Id(), $Frame->FrameId(), $scale, ( $show=='anal'?'capt':'anal' ) );
echo sprintf('<a href="?view=frame&amp;eid=%d&amp;fid=%d&scale=%d&amp;show=%s" title="Click to display frame %s analysis">',
$Event->Id(), $Frame->FrameId(), $scale, ( $show=='anal'?'capt':'anal' ),
( $show=='anal'?'without':'with' )
);
}
?>
<img id="frameImg"

View File

@ -88,9 +88,15 @@ function initialAlarmCues(eventId) {
}
function setAlarmCues(data) {
cueFrames = data.frames;
alarmSpans = renderAlarmCues(vid ? $j("#videoobj") : $j("#evtStream"));//use videojs width or zms width
$j(".alarmCue").html(alarmSpans);
if (!data) {
Error('No data in setAlarmCues for event ' + eventData.Id);
} else if (!data.frames) {
Error('No data.frames in setAlarmCues for event ' + eventData.Id);
} else {
cueFrames = data.frames;
alarmSpans = renderAlarmCues(vid ? $j("#videoobj") : $j("#evtStream"));//use videojs width or zms width
$j(".alarmCue").html(alarmSpans);
}
}
function renderAlarmCues(containerEl) {

View File

@ -40,8 +40,8 @@ function changeScale() {
}
function getFrameStatsCookie() {
var cookie = 'zmFrameStats';
var stats = getCookie(cookie);
const cookie = 'zmFrameStats';
let stats = getCookie(cookie);
if (!stats) {
stats = 'on';
@ -53,7 +53,7 @@ function getFrameStatsCookie() {
function getStat(params) {
$j.getJSON(thisUrl + '?view=request&request=stats&raw=true', params)
.done(function(data) {
var stats = data.raw;
const stats = data.raw;
$j('#frameStatsTable').empty().append('<tbody>');

View File

@ -4,7 +4,9 @@ var saveBtn = $j('#saveBtn');
var cancelBtn = $j('#cancelBtn');
var backBtn = $j('#backBtn');
var refreshBtn = $j('#refreshBtn');
var analyseBtn = $j('#analyseBtn');
var monitors = [];
var analyse_frames = true;
function validateForm( form ) {
var errors = [];
@ -566,6 +568,7 @@ function watchdogCheck(type) {
function watchdogOk(type) {
watchdogInactive[type] = false;
}
function presetSelectorBlur() {
this.selectedIndex = 0;
}
@ -656,12 +659,34 @@ function initPage() {
};
}
if ( el = analyseBtn[0] ) {
el.onclick = function() {
console.log(analyse_frames);
analyse_frames = !analyse_frames;
if (analyse_frames) {
analyseBtn.addClass('btn-primary');
analyseBtn.removeClass('btn-secondary');
analyseBtn.attr('title', translate['Showing Analysis']);
} else {
analyseBtn.removeClass('btn-primary');
analyseBtn.addClass('btn-secondary');
analyseBtn.attr('title', translate['Not Showing Analysis']);
}
for ( var i = 0, length = monitors.length; i < length; i++ ) {
monitors[i].show_analyse_frames(analyse_frames);
}
};
} else {
console.log('Analyse button not found');
}
for ( var i = 0, length = monitorData.length; i < length; i++ ) {
monitors[i] = new MonitorStream(monitorData[i]);
// Start the fps and status updates. give a random delay so that we don't assault the server
var delay = Math.round( (Math.random()+0.5)*statusRefreshTimeout );
monitors[i].setScale('auto');
monitors[i].show_analyse_frames(analyse_frames);
monitors[i].start(delay);
}

View File

@ -97,6 +97,8 @@ var deleteString = "<?php echo translate('Delete') ?>";
var CMD_PAUSE = <?php echo CMD_PAUSE ?>;
var CMD_PLAY = <?php echo CMD_PLAY ?>;
var CMD_STOP = <?php echo CMD_STOP ?>;
var CMD_ANALYZE_ON = <?php echo CMD_ANALYZE_ON ?>;
var CMD_ANALYZE_OFF = <?php echo CMD_ANALYZE_OFF ?>;
var CMD_QUERY = <?php echo CMD_QUERY ?>;
var SCALE_BASE = <?php echo SCALE_BASE ?>;
@ -120,3 +122,7 @@ var canStreamNative = <?php echo canStreamNative()?'true':'false' ?>;
var refreshApplet = <?php echo (canStreamApplet() && $streamMode == "jpeg")?'true':'false' ?>;
var appletRefreshTime = <?php echo ZM_RELOAD_CAMBOZOLA ?>;
var translate = {
"Showing Analysis": '<?php echo translate('Showing Analysis'); ?>',
"Not Showing Analysis": '<?php echo translate('Not Showing Analysis'); ?>'
};

View File

@ -723,6 +723,10 @@ if (count($available_monitor_ids)) {
<td class="text-right pr-3"><?php echo translate('ONVIF_Options') ?></td>
<td><input type="text" name="newMonitor[ONVIF_Options]" value="<?php echo validHtmlStr($monitor->ONVIF_Options()) ?>"/></td>
</tr>
<tr>
<td class="text-right pr-3"><?php echo translate('ONVIF_Event_Listener') ?></td>
<td><?php echo html_radio('newMonitor[ONVIF_Event_Listener]', array('1'=>translate('Enabled'), '0'=>translate('Disabled')), $monitor->ONVIF_Event_Listener()); ?></td>
</tr>
<?php
break;
}

View File

@ -169,10 +169,13 @@ if ( count($other_zones) ) {
<?php echo translate('State') ?>:&nbsp;<span id="stateValue<?php echo $monitor->Id() ?>"></span>&nbsp;-&nbsp;<span id="fpsValue<?php echo $monitor->Id() ?>"></span>&nbsp;fps
</div>
<div id="StreamControlButtons">
<button type="button" id="pauseBtn" title="<?php echo translate('Pause') ?>">
<button type="button" id="analyseBtn" class="btn btn-primary" title="<?php echo translate('Showing Analysis') ?>">
<i class="material-icons md-18">assessment</i>
</button>
<button type="button" id="pauseBtn" class="btn btn-primary" title="<?php echo translate('Pause') ?>">
<i class="material-icons md-18">pause</i>
</button>
<button type="button" id="playBtn" title="<?php echo translate('Play') ?>">
<button type="button" id="playBtn" class="btn btn-primary" title="<?php echo translate('Play') ?>">
<i class="material-icons md-18">play_arrow</i>
</button>
</div>