From b1f8485969e9b8a09d7fade2b263fd6e3ff4b617 Mon Sep 17 00:00:00 2001 From: Isaac Connor Date: Tue, 16 May 2017 12:04:56 -0400 Subject: [PATCH] Fix videostorage (#1885) * merge relevant c bits to make h264 recording work * h264 code requires libavresample and boost * Need libavresample --- CMakeLists.txt | 24 + src/CMakeLists.txt | 2 +- src/zm_buffer.h | 2 +- src/zm_camera.cpp | 7 +- src/zm_camera.h | 21 +- src/zm_curl_camera.cpp | 72 +- src/zm_curl_camera.h | 7 +- src/zm_event.cpp | 456 +++----- src/zm_event.h | 60 +- src/zm_ffmpeg.cpp | 176 +-- src/zm_ffmpeg.h | 188 +-- src/zm_ffmpeg_camera.cpp | 660 ++++++----- src/zm_ffmpeg_camera.h | 99 +- src/zm_file_camera.cpp | 2 +- src/zm_file_camera.h | 3 +- src/zm_image.cpp | 2084 ++++++++++++++++----------------- src/zm_libvlc_camera.cpp | 26 +- src/zm_libvlc_camera.h | 2 +- src/zm_local_camera.cpp | 732 ++++++------ src/zm_local_camera.h | 59 +- src/zm_monitor.cpp | 1561 +++++++++--------------- src/zm_monitor.h | 125 +- src/zm_packet.cpp | 44 + src/zm_packet.h | 39 + src/zm_packetqueue.cpp | 152 +++ src/zm_packetqueue.h | 52 + src/zm_remote_camera.h | 2 +- src/zm_remote_camera_http.cpp | 2 +- src/zm_remote_camera_http.h | 4 +- src/zm_remote_camera_rtsp.cpp | 267 ++--- src/zm_remote_camera_rtsp.h | 2 +- src/zm_utils.cpp | 12 + src/zm_utils.h | 1 + src/zm_videostore.cpp | 802 ++++++++++--- src/zm_videostore.h | 81 +- zoneminder-config.cmake | 14 +- 36 files changed, 4071 insertions(+), 3771 deletions(-) create mode 100644 src/zm_packet.cpp create mode 100644 src/zm_packet.h create mode 100644 src/zm_packetqueue.cpp create mode 100644 src/zm_packetqueue.h diff --git a/CMakeLists.txt b/CMakeLists.txt index 420fdf0ba..d5f469d34 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -571,6 +571,23 @@ if(NOT ZM_NO_FFMPEG) set(optlibsnotfound "${optlibsnotfound} SWScale") endif(SWSCALE_LIBRARIES) + # rescale (using find_library and find_path) + find_library(AVRESAMPLE_LIBRARIES avresample) + if(AVRESAMPLE_LIBRARIES) + 
set(HAVE_LIBAVRESAMPLE 1) + list(APPEND ZM_BIN_LIBS "${AVRESAMPLE_LIBRARIES}") + find_path(AVRESAMPLE_INCLUDE_DIR "libavresample/avresample.h" /usr/include/ffmpeg) + if(AVRESAMPLE_INCLUDE_DIR) + include_directories("${AVRESAMPLE_INCLUDE_DIR}") + set(CMAKE_REQUIRED_INCLUDES "${AVRESAMPLE_INCLUDE_DIR}") + endif(AVRESAMPLE_INCLUDE_DIR) + mark_as_advanced(FORCE AVRESAMPLE_LIBRARIES AVRESAMPLE_INCLUDE_DIR) + check_include_file("libavresample/avresample.h" HAVE_LIBAVRESAMPLE_AVRESAMPLE_H) + set(optlibsfound "${optlibsfound} AVResample") + else(AVRESAMPLE_LIBRARIES) + set(optlibsnotfound "${optlibsnotfound} AVResample") + endif(AVRESAMPLE_LIBRARIES) + # Find the path to the ffmpeg executable find_program(FFMPEG_EXECUTABLE NAMES ffmpeg avconv @@ -603,6 +620,13 @@ if(NOT ZM_NO_LIBVLC) endif(LIBVLC_LIBRARIES) endif(NOT ZM_NO_LIBVLC) +find_package(Boost 1.36.0) +if(Boost_FOUND) + include_directories(${Boost_INCLUDE_DIRS}) + set(CMAKE_REQUIRED_INCLUDES "${Boost_INCLUDE_DIRS}") + list(APPEND ZM_BIN_LIBS "${Boost_LIBRARIES}") +endif() + # *** END OF LIBRARY CHECKS *** # Check for gnutls or crypto diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index bd9103f76..92a3027fe 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -4,7 +4,7 @@ configure_file(zm_config.h.in "${CMAKE_CURRENT_BINARY_DIR}/zm_config.h" @ONLY) # Group together all the source files that are used by all the binaries (zmc, zma, zmu, zms etc) -set(ZM_BIN_SRC_FILES zm_box.cpp zm_buffer.cpp zm_camera.cpp zm_comms.cpp zm_config.cpp zm_coord.cpp zm_curl_camera.cpp zm.cpp zm_db.cpp zm_logger.cpp zm_event.cpp zm_exception.cpp zm_file_camera.cpp zm_ffmpeg_camera.cpp zm_image.cpp zm_jpeg.cpp zm_libvlc_camera.cpp zm_local_camera.cpp zm_monitor.cpp zm_ffmpeg.cpp zm_mpeg.cpp zm_poly.cpp zm_regexp.cpp zm_remote_camera.cpp zm_remote_camera_http.cpp zm_remote_camera_rtsp.cpp zm_rtp.cpp zm_rtp_ctrl.cpp zm_rtp_data.cpp zm_rtp_source.cpp zm_rtsp.cpp zm_rtsp_auth.cpp zm_sdp.cpp zm_signal.cpp zm_stream.cpp zm_thread.cpp 
zm_time.cpp zm_timer.cpp zm_user.cpp zm_utils.cpp zm_video.cpp zm_videostore.cpp zm_zone.cpp) +set(ZM_BIN_SRC_FILES zm_box.cpp zm_buffer.cpp zm_camera.cpp zm_comms.cpp zm_config.cpp zm_coord.cpp zm_curl_camera.cpp zm.cpp zm_db.cpp zm_logger.cpp zm_event.cpp zm_exception.cpp zm_file_camera.cpp zm_ffmpeg_camera.cpp zm_image.cpp zm_jpeg.cpp zm_libvlc_camera.cpp zm_local_camera.cpp zm_monitor.cpp zm_ffmpeg.cpp zm_mpeg.cpp zm_packet.cpp zm_packetqueue.cpp zm_poly.cpp zm_regexp.cpp zm_remote_camera.cpp zm_remote_camera_http.cpp zm_remote_camera_rtsp.cpp zm_rtp.cpp zm_rtp_ctrl.cpp zm_rtp_data.cpp zm_rtp_source.cpp zm_rtsp.cpp zm_rtsp_auth.cpp zm_sdp.cpp zm_signal.cpp zm_stream.cpp zm_thread.cpp zm_time.cpp zm_timer.cpp zm_user.cpp zm_utils.cpp zm_video.cpp zm_videostore.cpp zm_zone.cpp) # A fix for cmake recompiling the source files for every target. add_library(zm STATIC ${ZM_BIN_SRC_FILES}) diff --git a/src/zm_buffer.h b/src/zm_buffer.h index bcc952dc6..6fa18984e 100644 --- a/src/zm_buffer.h +++ b/src/zm_buffer.h @@ -153,7 +153,7 @@ public: mHead = mTail = mStorage; else if ( level ) { - if ( (mHead-mStorage) > mSize ) + if ( ((uintptr_t)mHead-(uintptr_t)mStorage) > mSize ) { memcpy( mStorage, mHead, mSize ); mHead = mStorage; diff --git a/src/zm_camera.cpp b/src/zm_camera.cpp index 660c11a8b..0ea7af373 100644 --- a/src/zm_camera.cpp +++ b/src/zm_camera.cpp @@ -20,7 +20,7 @@ #include "zm.h" #include "zm_camera.h" -Camera::Camera( unsigned int p_monitor_id, SourceType p_type, int p_width, int p_height, int p_colours, int p_subpixelorder, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ) : +Camera::Camera( unsigned int p_monitor_id, SourceType p_type, unsigned int p_width, unsigned int p_height, int p_colours, int p_subpixelorder, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ) : monitor_id( p_monitor_id ), type( p_type ), width( p_width), @@ -55,3 +55,8 @@ Monitor 
*Camera::getMonitor() { monitor = Monitor::Load( monitor_id, false, Monitor::QUERY ); return monitor; } + +void Camera::setMonitor( Monitor *p_monitor ) { + monitor = p_monitor; + monitor_id = monitor->Id(); +} diff --git a/src/zm_camera.h b/src/zm_camera.h index 132703e0c..4d991d495 100644 --- a/src/zm_camera.h +++ b/src/zm_camera.h @@ -47,19 +47,20 @@ protected: unsigned int subpixelorder; unsigned int pixels; unsigned int imagesize; - int brightness; - int hue; - int colour; - int contrast; - bool capture; - bool record_audio; + int brightness; + int hue; + int colour; + int contrast; + bool capture; + bool record_audio; public: - Camera( unsigned int p_monitor_id, SourceType p_type, int p_width, int p_height, int p_colours, int p_subpixelorder, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ); + Camera( unsigned int p_monitor_id, SourceType p_type, unsigned int p_width, unsigned int p_height, int p_colours, int p_subpixelorder, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ); virtual ~Camera(); unsigned int getId() const { return( monitor_id ); } Monitor *getMonitor(); + void setMonitor( Monitor *p_monitor ); SourceType Type() const { return( type ); } bool IsLocal() const { return( type == LOCAL_SRC ); } bool IsRemote() const { return( type == REMOTE_SRC ); } @@ -80,14 +81,14 @@ public: virtual int Contrast( int/*p_contrast*/=-1 ) { return( -1 ); } bool CanCapture() const { return( capture ); } - + bool SupportsNativeVideo() const { return( (type == FFMPEG_SRC )||(type == REMOTE_SRC)); } - + virtual int PrimeCapture() { return( 0 ); } virtual int PreCapture()=0; virtual int Capture( Image &image )=0; virtual int PostCapture()=0; - virtual int CaptureAndRecord( Image &image, bool recording, char* event_directory)=0; + virtual int CaptureAndRecord( Image &image, timeval recording, char* event_directory ) = 0; }; #endif // ZM_CAMERA_H diff --git 
a/src/zm_curl_camera.cpp b/src/zm_curl_camera.cpp index 0cdf3ec36..a5717d9ca 100644 --- a/src/zm_curl_camera.cpp +++ b/src/zm_curl_camera.cpp @@ -18,8 +18,11 @@ // #include "zm.h" + #include "zm_curl_camera.h" +#include "zm_packetqueue.h" + #if HAVE_LIBCURL #define CURL_MAXRETRY 5 @@ -30,28 +33,24 @@ const char* content_type_match = "Content-Type:"; size_t content_length_match_len; size_t content_type_match_len; -cURLCamera::cURLCamera( int p_id, const std::string &p_path, const std::string &p_user, const std::string &p_pass, int p_width, int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ) : +cURLCamera::cURLCamera( int p_id, const std::string &p_path, const std::string &p_user, const std::string &p_pass, unsigned int p_width, unsigned int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ) : Camera( p_id, CURL_SRC, p_width, p_height, p_colours, ZM_SUBPIX_ORDER_DEFAULT_FOR_COLOUR(p_colours), p_brightness, p_contrast, p_hue, p_colour, p_capture, p_record_audio ), mPath( p_path ), mUser( p_user ), mPass ( p_pass ), bTerminate( false ), bReset( false ), mode ( MODE_UNSET ) { - if ( capture ) - { + if ( capture ) { Initialise(); } } -cURLCamera::~cURLCamera() -{ - if ( capture ) - { +cURLCamera::~cURLCamera() { + if ( capture ) { Terminate(); } } -void cURLCamera::Initialise() -{ +void cURLCamera::Initialise() { content_length_match_len = strlen(content_length_match); content_type_match_len = strlen(content_type_match); @@ -88,8 +87,7 @@ void cURLCamera::Initialise() } } -void cURLCamera::Terminate() -{ +void cURLCamera::Terminate() { /* Signal the thread to terminate */ bTerminate = true; @@ -108,20 +106,17 @@ void cURLCamera::Terminate() } -int cURLCamera::PrimeCapture() -{ +int cURLCamera::PrimeCapture() { //Info( "Priming capture from %s", mPath.c_str() ); return 0; } -int cURLCamera::PreCapture() -{ - // Nothing to do here 
- return( 0 ); +int cURLCamera::PreCapture() { + // Nothing to do here + return( 0 ); } -int cURLCamera::Capture( Image &image ) -{ +int cURLCamera::Capture( Image &image ) { bool frameComplete = false; /* MODE_STREAM specific variables */ @@ -305,22 +300,19 @@ int cURLCamera::Capture( Image &image ) return 0; } -int cURLCamera::PostCapture() -{ - // Nothing to do here - return( 0 ); -} - -int cURLCamera::CaptureAndRecord( Image &image, bool recording, char* event_directory ) -{ - Error("Capture and Record not implemented for the cURL camera type"); +int cURLCamera::PostCapture() { // Nothing to do here return( 0 ); } +int cURLCamera::CaptureAndRecord( Image &image, struct timeval recording, char* event_directory ) { + Error("Capture and Record not implemented for the cURL camera type"); + // Nothing to do here + return( 0 ); +} -size_t cURLCamera::data_callback(void *buffer, size_t size, size_t nmemb, void *userdata) -{ + +size_t cURLCamera::data_callback(void *buffer, size_t size, size_t nmemb, void *userdata) { lock(); /* Append the data we just received to our buffer */ @@ -341,8 +333,7 @@ size_t cURLCamera::data_callback(void *buffer, size_t size, size_t nmemb, void * -size_t cURLCamera::header_callback( void *buffer, size_t size, size_t nmemb, void *userdata) -{ +size_t cURLCamera::header_callback( void *buffer, size_t size, size_t nmemb, void *userdata) { std::string header; header.assign((const char*)buffer, size*nmemb); @@ -382,8 +373,7 @@ size_t cURLCamera::header_callback( void *buffer, size_t size, size_t nmemb, voi return size*nmemb; } -void* cURLCamera::thread_func() -{ +void* cURLCamera::thread_func() { long tRet; double dSize; @@ -529,8 +519,7 @@ int cURLCamera::unlock() { return nRet; } -int cURLCamera::progress_callback(void *userdata, double dltotal, double dlnow, double ultotal, double ulnow) -{ +int cURLCamera::progress_callback(void *userdata, double dltotal, double dlnow, double ultotal, double ulnow) { /* Signal the curl thread to terminate 
*/ if(bTerminate) return -10; @@ -539,18 +528,15 @@ int cURLCamera::progress_callback(void *userdata, double dltotal, double dlnow, } /* These functions call the functions in the class for the correct object */ -size_t data_callback_dispatcher(void *buffer, size_t size, size_t nmemb, void *userdata) -{ +size_t data_callback_dispatcher(void *buffer, size_t size, size_t nmemb, void *userdata) { return ((cURLCamera*)userdata)->data_callback(buffer,size,nmemb,userdata); } -size_t header_callback_dispatcher(void *buffer, size_t size, size_t nmemb, void *userdata) -{ +size_t header_callback_dispatcher(void *buffer, size_t size, size_t nmemb, void *userdata) { return ((cURLCamera*)userdata)->header_callback(buffer,size,nmemb,userdata); } -int progress_callback_dispatcher(void *userdata, double dltotal, double dlnow, double ultotal, double ulnow) -{ +int progress_callback_dispatcher(void *userdata, double dltotal, double dlnow, double ultotal, double ulnow) { return ((cURLCamera*)userdata)->progress_callback(userdata,dltotal,dlnow,ultotal,ulnow); } @@ -558,6 +544,4 @@ void* thread_func_dispatcher(void* object) { return ((cURLCamera*)object)->thread_func(); } - - #endif // HAVE_LIBCURL diff --git a/src/zm_curl_camera.h b/src/zm_curl_camera.h index f9247dd50..c9dc2e935 100644 --- a/src/zm_curl_camera.h +++ b/src/zm_curl_camera.h @@ -39,8 +39,7 @@ // Class representing 'curl' cameras, i.e. 
those which are // accessed using the curl library // -class cURLCamera : public Camera -{ +class cURLCamera : public Camera { protected: typedef enum {MODE_UNSET, MODE_SINGLE, MODE_STREAM} mode_t; @@ -65,7 +64,7 @@ protected: pthread_cond_t request_complete_cond; public: - cURLCamera( int p_id, const std::string &path, const std::string &username, const std::string &password, int p_width, int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ); + cURLCamera( int p_id, const std::string &path, const std::string &username, const std::string &password, unsigned int p_width, unsigned int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ); ~cURLCamera(); const std::string &Path() const { return( mPath ); } @@ -79,7 +78,7 @@ public: int PreCapture(); int Capture( Image &image ); int PostCapture(); - int CaptureAndRecord( Image &image, bool recording, char* event_directory); + int CaptureAndRecord( Image &image, struct timeval recording, char* event_directory ); size_t data_callback(void *buffer, size_t size, size_t nmemb, void *userdata); size_t header_callback(void *buffer, size_t size, size_t nmemb, void *userdata); diff --git a/src/zm_event.cpp b/src/zm_event.cpp index ea5b3af00..577f48b64 100644 --- a/src/zm_event.cpp +++ b/src/zm_event.cpp @@ -72,8 +72,7 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string createNotes( notes ); bool untimedEvent = false; - if ( !start_time.tv_sec ) - { + if ( !start_time.tv_sec ) { untimedEvent = true; gettimeofday( &start_time, 0 ); } @@ -82,14 +81,12 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string struct tm *stime = localtime( &start_time.tv_sec ); snprintf( sql, sizeof(sql), "insert into Events ( MonitorId, Name, StartTime, Width, Height, Cause, Notes, Videoed ) values ( %d, 'New Event', from_unixtime( %ld ), %d, %d, '%s', 
'%s', '%d' )", monitor->Id(), start_time.tv_sec, monitor->Width(), monitor->Height(), cause.c_str(), notes.c_str(), videoEvent ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't insert event: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } id = mysql_insert_id( &dbconn ); - if ( untimedEvent ) - { + if ( untimedEvent ) { Warning( "Event %d has zero time, setting to current", id ); } end_time.tv_sec = 0; @@ -98,8 +95,7 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string tot_score = 0; max_score = 0; - if ( config.use_deep_storage ) - { + if ( config.use_deep_storage ) { char *path_ptr = path; path_ptr += snprintf( path_ptr, sizeof(path), "%s/%d", config.dir_events, monitor->Id() ); @@ -114,17 +110,14 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string char date_path[PATH_MAX] = ""; char time_path[PATH_MAX] = ""; char *time_path_ptr = time_path; - for ( unsigned int i = 0; i < sizeof(dt_parts)/sizeof(*dt_parts); i++ ) - { + for ( unsigned int i = 0; i < sizeof(dt_parts)/sizeof(*dt_parts); i++ ) { path_ptr += snprintf( path_ptr, sizeof(path)-(path_ptr-path), "/%02d", dt_parts[i] ); struct stat statbuf; errno = 0; if ( stat( path, &statbuf ) ) { - if ( errno == ENOENT || errno == ENOTDIR ) - { - if ( mkdir( path, 0755 ) ) - { + if ( errno == ENOENT || errno == ENOTDIR ) { + if ( mkdir( path, 0755 ) ) { Fatal( "Can't mkdir %s: %s", path, strerror(errno)); } } else { @@ -147,18 +140,14 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string fclose( id_fp ); else Fatal( "Can't fopen %s: %s", id_file, strerror(errno)); - } - else - { + } else { snprintf( path, sizeof(path), "%s/%d/%d", config.dir_events, monitor->Id(), id ); struct stat statbuf; errno = 0; stat( path, &statbuf ); - if ( errno == ENOENT || errno == ENOTDIR ) - { - if ( mkdir( path, 0755 ) ) - { + if ( errno == ENOENT || errno == ENOTDIR ) { + if ( mkdir( 
path, 0755 ) ) { Error( "Can't mkdir %s: %s", path, strerror(errno)); } } @@ -169,7 +158,7 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string fclose( id_fp ); else Fatal( "Can't fopen %s: %s", id_file, strerror(errno)); - } + } // deep storage or not last_db_frame = 0; video_name[0] = 0; @@ -177,7 +166,6 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string /* Save as video */ if ( monitor->GetOptVideoWriter() != 0 ) { - int nRet; snprintf( video_name, sizeof(video_name), "%d-%s", id, "video.mp4" ); snprintf( video_file, sizeof(video_file), video_file_format, path, video_name ); snprintf( timecodes_name, sizeof(timecodes_name), "%d-%s", id, "video.timecodes" ); @@ -188,14 +176,13 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string #if ZM_HAVE_VIDEOWRITER_X264MP4 videowriter = new X264MP4Writer(video_file, monitor->Width(), monitor->Height(), monitor->Colours(), monitor->SubpixelOrder(), monitor->GetOptEncoderParams()); #else - videowriter = NULL; Error("ZoneMinder was not compiled with the X264 MP4 video writer, check dependencies (x264 and mp4v2)"); #endif } if(videowriter != NULL) { /* Open the video stream */ - nRet = videowriter->Open(); + int nRet = videowriter->Open(); if(nRet != 0) { Error("Failed opening video stream"); delete videowriter; @@ -213,20 +200,18 @@ Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string videowriter = NULL; } -} +} // Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string &p_cause, const StringSetMap &p_noteSetMap, bool p_videoEvent ) -Event::~Event() -{ - if ( frames > last_db_frame ) - { - struct DeltaTimeval delta_time; - DELTA_TIMEVAL( delta_time, end_time, start_time, DT_PREC_2 ); +Event::~Event() { + static char sql[ZM_SQL_MED_BUFSIZ]; + struct DeltaTimeval delta_time; + DELTA_TIMEVAL( delta_time, end_time, start_time, DT_PREC_2 ); + + if ( frames > last_db_frame ) { Debug( 1, 
"Adding closing frame %d to DB", frames ); - static char sql[ZM_SQL_SML_BUFSIZ]; snprintf( sql, sizeof(sql), "insert into Frames ( EventId, FrameId, TimeStamp, Delta ) values ( %d, %d, from_unixtime( %ld ), %s%ld.%02ld )", id, frames, end_time.tv_sec, delta_time.positive?"":"-", delta_time.sec, delta_time.fsec ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't insert frame: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -234,9 +219,7 @@ Event::~Event() /* Close the video file */ if ( videowriter != NULL ) { - int nRet; - - nRet = videowriter->Close(); + int nRet = videowriter->Close(); if(nRet != 0) { Error("Failed closing video stream"); } @@ -248,29 +231,20 @@ Event::~Event() timecodes_fd = NULL; } - static char sql[ZM_SQL_MED_BUFSIZ]; - - struct DeltaTimeval delta_time; - DELTA_TIMEVAL( delta_time, end_time, start_time, DT_PREC_2 ); - snprintf( sql, sizeof(sql), "update Events set Name='%s%d', EndTime = from_unixtime( %ld ), Length = %s%ld.%02ld, Frames = %d, AlarmFrames = %d, TotScore = %d, AvgScore = %d, MaxScore = %d, DefaultVideo = '%s' where Id = %d", monitor->EventPrefix(), id, end_time.tv_sec, delta_time.positive?"":"-", delta_time.sec, delta_time.fsec, frames, alarm_frames, tot_score, (int)(alarm_frames?(tot_score/alarm_frames):0), max_score, video_name, id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't update event: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } } -void Event::createNotes( std::string ¬es ) -{ +void Event::createNotes( std::string ¬es ) { notes.clear(); - for ( StringSetMap::const_iterator mapIter = noteSetMap.begin(); mapIter != noteSetMap.end(); mapIter++ ) - { + for ( StringSetMap::const_iterator mapIter = noteSetMap.begin(); mapIter != noteSetMap.end(); mapIter++ ) { notes += mapIter->first; notes += ": "; const StringSet &stringSet = mapIter->second; - for ( StringSet::const_iterator setIter = 
stringSet.begin(); setIter != stringSet.end(); setIter++ ) - { + for ( StringSet::const_iterator setIter = stringSet.begin(); setIter != stringSet.end(); setIter++ ) { if ( setIter != stringSet.begin() ) notes += ", "; notes += *setIter; @@ -280,8 +254,7 @@ void Event::createNotes( std::string ¬es ) int Event::sd = -1; -bool Event::WriteFrameImage( Image *image, struct timeval timestamp, const char *event_file, bool alarm_frame ) -{ +bool Event::WriteFrameImage( Image *image, struct timeval timestamp, const char *event_file, bool alarm_frame ) { Image* ImgToWrite; Image* ts_image = NULL; @@ -290,8 +263,7 @@ bool Event::WriteFrameImage( Image *image, struct timeval timestamp, const char ts_image = new Image(*image); monitor->TimestampImage( ts_image, ×tamp ); ImgToWrite=ts_image; - } - else + } else ImgToWrite=image; int thisquality = ( alarm_frame && (config.jpeg_alarm_file_quality > config.jpeg_file_quality) ) ? config.jpeg_alarm_file_quality : 0 ; // quality to use, zero is default @@ -301,14 +273,12 @@ bool Event::WriteFrameImage( Image *image, struct timeval timestamp, const char return( true ); } -bool Event::WriteFrameVideo( const Image *image, const struct timeval timestamp, VideoWriter* videow ) -{ +bool Event::WriteFrameVideo( const Image *image, const struct timeval timestamp, VideoWriter* videow ) { const Image* frameimg = image; Image ts_image; /* Checking for invalid parameters */ - if ( videow == NULL ) - { + if ( videow == NULL ) { Error("NULL Video object"); return false; } @@ -336,44 +306,32 @@ bool Event::WriteFrameVideo( const Image *image, const struct timeval timestamp, return( true ); } -void Event::updateNotes( const StringSetMap &newNoteSetMap ) -{ +void Event::updateNotes( const StringSetMap &newNoteSetMap ) { bool update = false; //Info( "Checking notes, %d <> %d", noteSetMap.size(), newNoteSetMap.size() ); - if ( newNoteSetMap.size() > 0 ) - { - if ( noteSetMap.size() == 0 ) - { + if ( newNoteSetMap.size() > 0 ) { + if ( noteSetMap.size() 
== 0 ) { noteSetMap = newNoteSetMap; update = true; - } - else - { - for ( StringSetMap::const_iterator newNoteSetMapIter = newNoteSetMap.begin(); newNoteSetMapIter != newNoteSetMap.end(); newNoteSetMapIter++ ) - { + } else { + for ( StringSetMap::const_iterator newNoteSetMapIter = newNoteSetMap.begin(); newNoteSetMapIter != newNoteSetMap.end(); newNoteSetMapIter++ ) { const std::string &newNoteGroup = newNoteSetMapIter->first; const StringSet &newNoteSet = newNoteSetMapIter->second; //Info( "Got %d new strings", newNoteSet.size() ); - if ( newNoteSet.size() > 0 ) - { + if ( newNoteSet.size() > 0 ) { StringSetMap::iterator noteSetMapIter = noteSetMap.find( newNoteGroup ); - if ( noteSetMapIter == noteSetMap.end() ) - { + if ( noteSetMapIter == noteSetMap.end() ) { //Info( "Can't find note group %s, copying %d strings", newNoteGroup.c_str(), newNoteSet.size() ); noteSetMap.insert( StringSetMap::value_type( newNoteGroup, newNoteSet ) ); update = true; - } - else - { + } else { StringSet ¬eSet = noteSetMapIter->second; //Info( "Found note group %s, got %d strings", newNoteGroup.c_str(), newNoteSet.size() ); - for ( StringSet::const_iterator newNoteSetIter = newNoteSet.begin(); newNoteSetIter != newNoteSet.end(); newNoteSetIter++ ) - { + for ( StringSet::const_iterator newNoteSetIter = newNoteSet.begin(); newNoteSetIter != newNoteSet.end(); newNoteSetIter++ ) { const std::string &newNote = *newNoteSetIter; StringSet::iterator noteSetIter = noteSet.find( newNote ); - if ( noteSetIter == noteSet.end() ) - { + if ( noteSetIter == noteSet.end() ) { noteSet.insert( newNote ); update = true; } @@ -384,8 +342,7 @@ void Event::updateNotes( const StringSetMap &newNoteSetMap ) } } - if ( update ) - { + if ( update ) { std::string notes; createNotes( notes ); @@ -397,19 +354,16 @@ void Event::updateNotes( const StringSetMap &newNoteSetMap ) char notesStr[ZM_SQL_MED_BUFSIZ] = ""; unsigned long notesLen = 0; - if ( !stmt ) - { + if ( !stmt ) { const char *sql = "update Events set 
Notes = ? where Id = ?"; stmt = mysql_stmt_init( &dbconn ); - if ( mysql_stmt_prepare( stmt, sql, strlen(sql) ) ) - { + if ( mysql_stmt_prepare( stmt, sql, strlen(sql) ) ) { Fatal( "Unable to prepare sql '%s': %s", sql, mysql_stmt_error(stmt) ); } /* Get the parameter count from the statement */ - if ( mysql_stmt_param_count( stmt ) != 2 ) - { + if ( mysql_stmt_param_count( stmt ) != 2 ) { Fatal( "Unexpected parameter count %ld in sql '%s'", mysql_stmt_param_count( stmt ), sql ); } @@ -429,8 +383,7 @@ void Event::updateNotes( const StringSetMap &newNoteSetMap ) bind[1].length= 0; /* Bind the buffers */ - if ( mysql_stmt_bind_param( stmt, bind ) ) - { + if ( mysql_stmt_bind_param( stmt, bind ) ) { Fatal( "Unable to bind sql '%s': %s", sql, mysql_stmt_error(stmt) ); } } @@ -438,8 +391,7 @@ void Event::updateNotes( const StringSetMap &newNoteSetMap ) strncpy( notesStr, notes.c_str(), sizeof(notesStr) ); notesLen = notes.length(); - if ( mysql_stmt_execute( stmt ) ) - { + if ( mysql_stmt_execute( stmt ) ) { Fatal( "Unable to execute sql '%s': %s", sql, mysql_stmt_error(stmt) ); } #else @@ -448,30 +400,25 @@ void Event::updateNotes( const StringSetMap &newNoteSetMap ) mysql_real_escape_string( &dbconn, escapedNotes, notes.c_str(), notes.length() ); snprintf( sql, sizeof(sql), "update Events set Notes = '%s' where Id = %d", escapedNotes, id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't insert event: %s", mysql_error( &dbconn ) ); } #endif } } -void Event::AddFrames( int n_frames, Image **images, struct timeval **timestamps ) -{ +void Event::AddFrames( int n_frames, Image **images, struct timeval **timestamps ) { for (int i = 0; i < n_frames; i += ZM_SQL_BATCH_SIZE) { AddFramesInternal(n_frames, i, images, timestamps); } } -void Event::AddFramesInternal( int n_frames, int start_frame, Image **images, struct timeval **timestamps ) -{ +void Event::AddFramesInternal( int n_frames, int start_frame, Image **images, struct 
timeval **timestamps ) { static char sql[ZM_SQL_LGE_BUFSIZ]; strncpy( sql, "insert into Frames ( EventId, FrameId, TimeStamp, Delta ) values ", sizeof(sql) ); int frameCount = 0; - for ( int i = start_frame; i < n_frames && i - start_frame < ZM_SQL_BATCH_SIZE; i++ ) - { - if ( !timestamps[i]->tv_sec ) - { + for ( int i = start_frame; i < n_frames && i - start_frame < ZM_SQL_BATCH_SIZE; i++ ) { + if ( !timestamps[i]->tv_sec ) { Debug( 1, "Not adding pre-capture frame %d, zero timestamp", i ); continue; } @@ -505,27 +452,21 @@ void Event::AddFramesInternal( int n_frames, int start_frame, Image **images, st frameCount++; } - if ( frameCount ) - { + if ( frameCount ) { Debug( 1, "Adding %d/%d frames to DB", frameCount, n_frames ); *(sql+strlen(sql)-2) = '\0'; - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't insert frames: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } last_db_frame = frames; - } - else - { + } else { Debug( 1, "No valid pre-capture frames to add" ); } } -void Event::AddFrame( Image *image, struct timeval timestamp, int score, Image *alarm_image ) -{ - if ( !timestamp.tv_sec ) - { +void Event::AddFrame( Image *image, struct timeval timestamp, int score, Image *alarm_image ) { + if ( !timestamp.tv_sec ) { Debug( 1, "Not adding new frame, zero timestamp" ); return; } @@ -559,25 +500,21 @@ void Event::AddFrame( Image *image, struct timeval timestamp, int score, Image * score = 0; bool db_frame = (strcmp(frame_type,"Bulk") != 0) || ((frames%config.bulk_frame_interval)==0) || !frames; - if ( db_frame ) - { + if ( db_frame ) { Debug( 1, "Adding frame %d of type \"%s\" to DB", frames, frame_type ); static char sql[ZM_SQL_MED_BUFSIZ]; snprintf( sql, sizeof(sql), "insert into Frames ( EventId, FrameId, Type, TimeStamp, Delta, Score ) values ( %d, %d, '%s', from_unixtime( %ld ), %s%ld.%02ld, %d )", id, frames, frame_type, timestamp.tv_sec, delta_time.positive?"":"-", delta_time.sec, delta_time.fsec, 
score ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't insert frame: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } last_db_frame = frames; // We are writing a Bulk frame - if ( !strcmp( frame_type,"Bulk") ) - { + if ( !strcmp( frame_type,"Bulk") ) { snprintf( sql, sizeof(sql), "update Events set Length = %s%ld.%02ld, Frames = %d, AlarmFrames = %d, TotScore = %d, AvgScore = %d, MaxScore = %d where Id = %d", delta_time.positive?"":"-", delta_time.sec, delta_time.fsec, frames, alarm_frames, tot_score, (int)(alarm_frames?(tot_score/alarm_frames):0), max_score, id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't update event: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -586,17 +523,15 @@ void Event::AddFrame( Image *image, struct timeval timestamp, int score, Image * end_time = timestamp; - // We are writing an Alarm frame - if ( !strcmp( frame_type,"Alarm") ) - { + // We are writing an Alarm frame + if ( !strcmp( frame_type,"Alarm") ) { alarm_frames++; tot_score += score; if ( score > (int)max_score ) max_score = score; - if ( alarm_image ) - { + if ( alarm_image ) { snprintf( event_file, sizeof(event_file), analyse_file_format, path, frames ); Debug( 1, "Writing analysis frame %d", frames ); @@ -650,28 +585,24 @@ void Event::AddFrame( Image *image, struct timeval timestamp, int score, Image * */ } -bool EventStream::loadInitialEventData( int monitor_id, time_t event_time ) -{ +bool EventStream::loadInitialEventData( int monitor_id, time_t event_time ) { static char sql[ZM_SQL_SML_BUFSIZ]; snprintf( sql, sizeof(sql), "select Id from Events where MonitorId = %d and unix_timestamp( EndTime ) > %ld order by Id asc limit 1", monitor_id, event_time ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_RES *result = 
mysql_store_result( &dbconn ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_ROW dbrow = mysql_fetch_row( result ); - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -682,17 +613,13 @@ bool EventStream::loadInitialEventData( int monitor_id, time_t event_time ) loadEventData( init_event_id ); - if ( event_time ) - { + if ( event_time ) { curr_stream_time = event_time; curr_frame_id = 1; - if ( event_time >= event_data->start_time ) - { - for (unsigned int i = 0; i < event_data->frame_count; i++ ) - { + if ( event_time >= event_data->start_time ) { + for (unsigned int i = 0; i < event_data->frame_count; i++ ) { //Info( "eft %d > et %d", event_data->frames[i].timestamp, event_time ); - if ( event_data->frames[i].timestamp >= event_time ) - { + if ( event_data->frames[i].timestamp >= event_time ) { curr_frame_id = i+1; Debug( 3, "Set cst:%.2f", curr_stream_time ); Debug( 3, "Set cfid:%d", curr_frame_id ); @@ -705,51 +632,42 @@ bool EventStream::loadInitialEventData( int monitor_id, time_t event_time ) return( true ); } -bool EventStream::loadInitialEventData( int init_event_id, unsigned int init_frame_id ) -{ +bool EventStream::loadInitialEventData( int init_event_id, unsigned int init_frame_id ) { loadEventData( init_event_id ); - if ( init_frame_id ) - { + if ( init_frame_id ) { curr_stream_time = event_data->frames[init_frame_id-1].timestamp; curr_frame_id = init_frame_id; - } - else - { + } else { curr_stream_time = event_data->start_time; } return( true ); } -bool EventStream::loadEventData( int event_id ) -{ +bool EventStream::loadEventData( int event_id ) { static char sql[ZM_SQL_MED_BUFSIZ]; snprintf( sql, sizeof(sql), "select M.Id, M.Name, E.Frames, unix_timestamp( StartTime ) as StartTimestamp, max(F.Delta)-min(F.Delta) as Duration from Events as E 
inner join Monitors as M on E.MonitorId = M.Id inner join Frames as F on E.Id = F.EventId where E.Id = %d group by E.Id", event_id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_RES *result = mysql_store_result( &dbconn ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } - if ( !mysql_num_rows( result ) ) - { + if ( !mysql_num_rows( result ) ) { Fatal( "Unable to load event %d, not found in DB", event_id ); } MYSQL_ROW dbrow = mysql_fetch_row( result ); - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -759,16 +677,13 @@ bool EventStream::loadEventData( int event_id ) event_data->event_id = event_id; event_data->monitor_id = atoi( dbrow[0] ); event_data->start_time = atoi(dbrow[3]); - if ( config.use_deep_storage ) - { + if ( config.use_deep_storage ) { struct tm *event_time = localtime( &event_data->start_time ); if ( config.dir_events[0] == '/' ) snprintf( event_data->path, sizeof(event_data->path), "%s/%ld/%02d/%02d/%02d/%02d/%02d/%02d", config.dir_events, event_data->monitor_id, event_time->tm_year-100, event_time->tm_mon+1, event_time->tm_mday, event_time->tm_hour, event_time->tm_min, event_time->tm_sec ); else snprintf( event_data->path, sizeof(event_data->path), "%s/%s/%ld/%02d/%02d/%02d/%02d/%02d/%02d", staticConfig.PATH_WEB.c_str(), config.dir_events, event_data->monitor_id, event_time->tm_year-100, event_time->tm_mon+1, event_time->tm_mday, event_time->tm_hour, event_time->tm_min, event_time->tm_sec ); - } - else - { + } else { if ( config.dir_events[0] == '/' ) snprintf( event_data->path, sizeof(event_data->path), "%s/%ld/%ld", config.dir_events, event_data->monitor_id, event_data->event_id ); else @@ -782,15 +697,13 @@ bool 
EventStream::loadEventData( int event_id ) mysql_free_result( result ); snprintf( sql, sizeof(sql), "select FrameId, unix_timestamp( `TimeStamp` ), Delta from Frames where EventId = %d order by FrameId asc", event_id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } result = mysql_store_result( &dbconn ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -801,17 +714,14 @@ bool EventStream::loadEventData( int event_id ) int id, last_id = 0; time_t timestamp, last_timestamp = event_data->start_time; double delta, last_delta = 0.0; - while ( ( dbrow = mysql_fetch_row( result ) ) ) - { + while ( ( dbrow = mysql_fetch_row( result ) ) ) { id = atoi(dbrow[0]); timestamp = atoi(dbrow[1]); delta = atof(dbrow[2]); int id_diff = id - last_id; double frame_delta = (delta-last_delta)/id_diff; - if ( id_diff > 1 ) - { - for ( int i = last_id+1; i < id; i++ ) - { + if ( id_diff > 1 ) { + for ( int i = last_id+1; i < id; i++ ) { event_data->frames[i-1].timestamp = (time_t)(last_timestamp + ((i-last_id)*frame_delta)); event_data->frames[i-1].offset = (time_t)(event_data->frames[i-1].timestamp-event_data->start_time); event_data->frames[i-1].delta = frame_delta; @@ -826,8 +736,7 @@ bool EventStream::loadEventData( int event_id ) last_delta = delta; last_timestamp = timestamp; } - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -839,8 +748,7 @@ bool EventStream::loadEventData( int event_id ) mysql_free_result( result ); - if ( forceEventChange || mode == MODE_ALL_GAPLESS ) - { + if ( forceEventChange || mode == MODE_ALL_GAPLESS ) { if ( replay_rate > 0 ) curr_stream_time = event_data->frames[0].timestamp; else @@ -851,12 +759,10 @@ bool 
EventStream::loadEventData( int event_id ) return( true ); } -void EventStream::processCommand( const CmdMsg *msg ) -{ +void EventStream::processCommand( const CmdMsg *msg ) { Debug( 2, "Got message, type %d, msg %d", msg->msg_type, msg->msg_data[0] ) // Check for incoming command - switch( (MsgCommand)msg->msg_data[0] ) - { + switch( (MsgCommand)msg->msg_data[0] ) { case CMD_PAUSE : { Debug( 1, "Got PAUSE command" ); @@ -905,14 +811,12 @@ void EventStream::processCommand( const CmdMsg *msg ) case CMD_FASTFWD : { Debug( 1, "Got FAST FWD command" ); - if ( paused ) - { + if ( paused ) { // Clear paused flag paused = false; } // Set play rate - switch ( replay_rate ) - { + switch ( replay_rate ) { case 2 * ZM_RATE_BASE : replay_rate = 5 * ZM_RATE_BASE; break; @@ -957,14 +861,12 @@ void EventStream::processCommand( const CmdMsg *msg ) case CMD_FASTREV : { Debug( 1, "Got FAST REV command" ); - if ( paused ) - { + if ( paused ) { // Clear paused flag paused = false; } // Set play rate - switch ( replay_rate ) - { + switch ( replay_rate ) { case -2 * ZM_RATE_BASE : replay_rate = -5 * ZM_RATE_BASE; break; @@ -989,8 +891,7 @@ void EventStream::processCommand( const CmdMsg *msg ) x = ((unsigned char)msg->msg_data[1]<<8)|(unsigned char)msg->msg_data[2]; y = ((unsigned char)msg->msg_data[3]<<8)|(unsigned char)msg->msg_data[4]; Debug( 1, "Got ZOOM IN command, to %d,%d", x, y ); - switch ( zoom ) - { + switch ( zoom ) { case 100: zoom = 150; break; @@ -1013,8 +914,7 @@ void EventStream::processCommand( const CmdMsg *msg ) case CMD_ZOOMOUT : { Debug( 1, "Got ZOOM OUT command" ); - switch ( zoom ) - { + switch ( zoom ) { case 500: zoom = 400; break; @@ -1115,8 +1015,7 @@ void EventStream::processCommand( const CmdMsg *msg ) DataMsg status_msg; status_msg.msg_type = MSG_DATA_EVENT; memcpy( &status_msg.msg_data, &status_data, sizeof(status_data) ); - if ( sendto( sd, &status_msg, sizeof(status_msg), MSG_DONTWAIT, (sockaddr *)&rem_addr, sizeof(rem_addr) ) < 0 ) - { + if ( sendto( 
sd, &status_msg, sizeof(status_msg), MSG_DONTWAIT, (sockaddr *)&rem_addr, sizeof(rem_addr) ) < 0 ) { //if ( errno != EAGAIN ) { Error( "Can't sendto on sd %d: %s", sd, strerror(errno) ); @@ -1130,49 +1029,39 @@ void EventStream::processCommand( const CmdMsg *msg ) updateFrameRate( (double)event_data->frame_count/event_data->duration ); } -void EventStream::checkEventLoaded() -{ +void EventStream::checkEventLoaded() { bool reload_event = false; static char sql[ZM_SQL_SML_BUFSIZ]; - if ( curr_frame_id <= 0 ) - { + if ( curr_frame_id <= 0 ) { snprintf( sql, sizeof(sql), "select Id from Events where MonitorId = %ld and Id < %ld order by Id desc limit 1", event_data->monitor_id, event_data->event_id ); reload_event = true; - } - else if ( (unsigned int)curr_frame_id > event_data->frame_count ) - { + } else if ( (unsigned int)curr_frame_id > event_data->frame_count ) { snprintf( sql, sizeof(sql), "select Id from Events where MonitorId = %ld and Id > %ld order by Id asc limit 1", event_data->monitor_id, event_data->event_id ); reload_event = true; } - if ( reload_event ) - { - if ( forceEventChange || mode != MODE_SINGLE ) - { + if ( reload_event ) { + if ( forceEventChange || mode != MODE_SINGLE ) { //Info( "SQL:%s", sql ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_RES *result = mysql_store_result( &dbconn ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_ROW dbrow = mysql_fetch_row( result ); - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } - if ( dbrow ) - { + if ( dbrow ) { int event_id = atoi(dbrow[0]); Debug( 1, "Loading new event %d", event_id ); @@ -1184,9 +1073,7 @@ void EventStream::checkEventLoaded() else curr_frame_id = 1; 
Debug( 2, "New frame id = %d", curr_frame_id ); - } - else - { + } else { if ( curr_frame_id <= 0 ) curr_frame_id = 1; else @@ -1195,9 +1082,7 @@ void EventStream::checkEventLoaded() } mysql_free_result( result ); forceEventChange = false; - } - else - { + } else { if ( curr_frame_id <= 0 ) curr_frame_id = 1; else @@ -1207,8 +1092,7 @@ void EventStream::checkEventLoaded() } } -bool EventStream::sendFrame( int delta_us ) -{ +bool EventStream::sendFrame( int delta_us ) { Debug( 2, "Sending frame %d", curr_frame_id ); static char filepath[PATH_MAX]; @@ -1218,21 +1102,18 @@ bool EventStream::sendFrame( int delta_us ) snprintf( filepath, sizeof(filepath), Event::capture_file_format, event_data->path, curr_frame_id ); #if HAVE_LIBAVCODEC - if ( type == STREAM_MPEG ) - { + if ( type == STREAM_MPEG ) { Image image( filepath ); Image *send_image = prepareImage( &image ); - if ( !vid_stream ) - { + if ( !vid_stream ) { vid_stream = new VideoStream( "pipe:", format, bitrate, effective_fps, send_image->Colours(), send_image->SubpixelOrder(), send_image->Width(), send_image->Height() ); fprintf( stdout, "Content-type: %s\r\n\r\n", vid_stream->MimeType() ); vid_stream->OpenStream(); } /* double pts = */ vid_stream->EncodeFrame( send_image->Buffer(), send_image->Size(), config.mpeg_timed_frames, delta_us*1000 ); - } - else + } else #endif // HAVE_LIBAVCODEC { static unsigned char temp_img_buffer[ZM_MAX_IMAGE_SIZE]; @@ -1247,31 +1128,26 @@ bool EventStream::sendFrame( int delta_us ) if ( type != STREAM_JPEG ) send_raw = false; - if ( send_raw ) - { + if ( send_raw ) { fdj = fopen( filepath, "rb" ); - if ( !fdj ) - { + if ( !fdj ) { Error( "Can't open %s: %s", filepath, strerror(errno) ); return( false ); } #if HAVE_SENDFILE if( fstat(fileno(fdj),&filestat) < 0 ) { - Error( "Failed getting information about file %s: %s", filepath, strerror(errno) ); - return( false ); - } + Error( "Failed getting information about file %s: %s", filepath, strerror(errno) ); + return( false ); + } 
#else - img_buffer_size = fread( img_buffer, 1, sizeof(temp_img_buffer), fdj ); + img_buffer_size = fread( img_buffer, 1, sizeof(temp_img_buffer), fdj ); #endif - } - else - { + } else { Image image( filepath ); Image *send_image = prepareImage( &image ); - switch( type ) - { + switch( type ) { case STREAM_JPEG : send_image->EncodeJpeg( img_buffer, &img_buffer_size ); break; @@ -1295,8 +1171,7 @@ bool EventStream::sendFrame( int delta_us ) } } - switch( type ) - { + switch( type ) { case STREAM_JPEG : fprintf( stdout, "Content-Type: image/jpeg\r\n" ); break; @@ -1312,33 +1187,32 @@ bool EventStream::sendFrame( int delta_us ) } - if(send_raw) { + if(send_raw) { #if HAVE_SENDFILE - fprintf( stdout, "Content-Length: %d\r\n\r\n", (int)filestat.st_size ); - if(zm_sendfile(fileno(stdout), fileno(fdj), 0, (int)filestat.st_size) != (int)filestat.st_size) { - /* sendfile() failed, use standard way instead */ - img_buffer_size = fread( img_buffer, 1, sizeof(temp_img_buffer), fdj ); + fprintf( stdout, "Content-Length: %d\r\n\r\n", (int)filestat.st_size ); + if(zm_sendfile(fileno(stdout), fileno(fdj), 0, (int)filestat.st_size) != (int)filestat.st_size) { + /* sendfile() failed, use standard way instead */ + img_buffer_size = fread( img_buffer, 1, sizeof(temp_img_buffer), fdj ); + if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) { + Error("Unable to send raw frame %u: %s",curr_frame_id,strerror(errno)); + return( false ); + } + } +#else + fprintf( stdout, "Content-Length: %d\r\n\r\n", img_buffer_size ); if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) { Error("Unable to send raw frame %u: %s",curr_frame_id,strerror(errno)); return( false ); } - } -#else - fprintf( stdout, "Content-Length: %d\r\n\r\n", img_buffer_size ); - if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) { - Error("Unable to send raw frame %u: %s",curr_frame_id,strerror(errno)); - return( false ); - } #endif - fclose(fdj); /* Close the file handle */ - } else { - fprintf( 
stdout, "Content-Length: %d\r\n\r\n", img_buffer_size ); - if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) - { - Error( "Unable to send stream frame: %s", strerror(errno) ); - return( false ); + fclose(fdj); /* Close the file handle */ + } else { + fprintf( stdout, "Content-Length: %d\r\n\r\n", img_buffer_size ); + if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) { + Error( "Unable to send stream frame: %s", strerror(errno) ); + return( false ); + } } - } fprintf( stdout, "\r\n\r\n" ); fflush( stdout ); @@ -1347,8 +1221,7 @@ bool EventStream::sendFrame( int delta_us ) return( true ); } -void EventStream::runStream() -{ +void EventStream::runStream() { Event::Initialise(); openComms(); @@ -1360,15 +1233,13 @@ void EventStream::runStream() if ( type == STREAM_JPEG ) fprintf( stdout, "Content-Type: multipart/x-mixed-replace;boundary=ZoneMinderFrame\r\n\r\n" ); - if ( !event_data ) - { + if ( !event_data ) { sendTextFrame( "No event data found" ); exit( 0 ); } unsigned int delta_us = 0; - while( !zm_terminate ) - { + while( !zm_terminate ) { gettimeofday( &now, NULL ); while(checkCommandQueue()); @@ -1384,27 +1255,21 @@ void EventStream::runStream() //Info( "cst:%.2f", curr_stream_time ); //Info( "cfid:%d", curr_frame_id ); //Info( "fdt:%d", frame_data->timestamp ); - if ( !paused ) - { + if ( !paused ) { bool in_event = true; double time_to_event = 0; - if ( replay_rate > 0 ) - { + if ( replay_rate > 0 ) { time_to_event = event_data->frames[0].timestamp - curr_stream_time; if ( time_to_event > 0 ) in_event = false; - } - else if ( replay_rate < 0 ) - { + } else if ( replay_rate < 0 ) { time_to_event = curr_stream_time - event_data->frames[event_data->frame_count-1].timestamp; if ( time_to_event > 0 ) in_event = false; } - if ( !in_event ) - { + if ( !in_event ) { double actual_delta_time = TV_2_FLOAT( now ) - last_frame_sent; - if ( actual_delta_time > 1 ) - { + if ( actual_delta_time > 1 ) { static char frame_text[64]; snprintf( frame_text, 
sizeof(frame_text), "Time to next event = %d seconds", (int)time_to_event ); if ( !sendTextFrame( frame_text ) ) @@ -1422,11 +1287,9 @@ void EventStream::runStream() // Figure out if we should send this frame bool send_frame = false; - if ( !paused ) - { + if ( !paused ) { // If we are streaming and this frame is due to be sent - if ( ((curr_frame_id-1)%frame_mod) == 0 ) - { + if ( ((curr_frame_id-1)%frame_mod) == 0 ) { delta_us = (unsigned int)(frame_data->delta * 1000000); // if effective > base we should speed up frame delivery delta_us = (unsigned int)((delta_us * base_fps)/effective_fps); @@ -1434,19 +1297,14 @@ void EventStream::runStream() delta_us = max(delta_us, 1000000 / maxfps); send_frame = true; } - } - else if ( step != 0 ) - { + } else if ( step != 0 ) { // We are paused and are just stepping forward or backward one frame step = 0; send_frame = true; - } - else - { + } else { // We are paused, and doing nothing double actual_delta_time = TV_2_FLOAT( now ) - last_frame_sent; - if ( actual_delta_time > MAX_STREAM_DELAY ) - { + if ( actual_delta_time > MAX_STREAM_DELAY ) { // Send keepalive Debug( 2, "Sending keepalive frame" ); send_frame = true; @@ -1459,17 +1317,13 @@ void EventStream::runStream() curr_stream_time = frame_data->timestamp; - if ( !paused ) - { + if ( !paused ) { curr_frame_id += replay_rate>0?1:-1; - if ( send_frame && type != STREAM_MPEG ) - { + if ( send_frame && type != STREAM_MPEG ) { Debug( 3, "dUs: %d", delta_us ); usleep( delta_us ); } - } - else - { + } else { usleep( (unsigned long)((1000000 * ZM_RATE_BASE)/((base_fps?base_fps:1)*abs(replay_rate*2))) ); } } diff --git a/src/zm_event.h b/src/zm_event.h index ac5631846..6b0a56349 100644 --- a/src/zm_event.h +++ b/src/zm_event.h @@ -47,8 +47,7 @@ class Monitor; // // Class describing events, i.e. captured periods of activity. 
// -class Event -{ +class Event { friend class EventStream; protected: @@ -68,8 +67,7 @@ public: protected: typedef enum { NORMAL, BULK, ALARM } FrameType; - struct PreAlarmData - { + struct PreAlarmData { Image *image; struct timeval timestamp; unsigned int score; @@ -103,8 +101,7 @@ protected: int last_db_frame; protected: - static void Initialise() - { + static void Initialise() { if ( initialised ) return; @@ -148,33 +145,26 @@ private: void AddFramesInternal( int n_frames, int start_frame, Image **images, struct timeval **timestamps ); public: - static const char *getSubPath( struct tm *time ) - { + static const char *getSubPath( struct tm *time ) { static char subpath[PATH_MAX] = ""; snprintf( subpath, sizeof(subpath), "%02d/%02d/%02d/%02d/%02d/%02d", time->tm_year-100, time->tm_mon+1, time->tm_mday, time->tm_hour, time->tm_min, time->tm_sec ); return( subpath ); } - static const char *getSubPath( time_t *time ) - { + static const char *getSubPath( time_t *time ) { return( Event::getSubPath( localtime( time ) ) ); } - char* getEventFile(void) - { + char* getEventFile(void) { return video_file; } public: - static int PreAlarmCount() - { + static int PreAlarmCount() { return( pre_alarm_count ); } - static void EmptyPreAlarmFrames() - { - if ( pre_alarm_count > 0 ) - { - for ( int i = 0; i < MAX_PRE_ALARM_FRAMES; i++ ) - { + static void EmptyPreAlarmFrames() { + if ( pre_alarm_count > 0 ) { + for ( int i = 0; i < MAX_PRE_ALARM_FRAMES; i++ ) { delete pre_alarm_data[i].image; delete pre_alarm_data[i].alarm_frame; } @@ -182,29 +172,24 @@ public: } pre_alarm_count = 0; } - static void AddPreAlarmFrame( Image *image, struct timeval timestamp, int score=0, Image *alarm_frame=NULL ) - { + static void AddPreAlarmFrame( Image *image, struct timeval timestamp, int score=0, Image *alarm_frame=NULL ) { pre_alarm_data[pre_alarm_count].image = new Image( *image ); pre_alarm_data[pre_alarm_count].timestamp = timestamp; pre_alarm_data[pre_alarm_count].score = score; - if ( 
alarm_frame ) - { + if ( alarm_frame ) { pre_alarm_data[pre_alarm_count].alarm_frame = new Image( *alarm_frame ); } pre_alarm_count++; } - void SavePreAlarmFrames() - { - for ( int i = 0; i < pre_alarm_count; i++ ) - { + void SavePreAlarmFrames() { + for ( int i = 0; i < pre_alarm_count; i++ ) { AddFrame( pre_alarm_data[i].image, pre_alarm_data[i].timestamp, pre_alarm_data[i].score, pre_alarm_data[i].alarm_frame ); } EmptyPreAlarmFrames(); } }; -class EventStream : public StreamBase -{ +class EventStream : public StreamBase { public: typedef enum { MODE_SINGLE, MODE_ALL, MODE_ALL_GAPLESS } StreamMode; @@ -217,8 +202,7 @@ protected: bool in_db; }; - struct EventData - { + struct EventData { unsigned long event_id; unsigned long monitor_id; unsigned long frame_count; @@ -254,8 +238,7 @@ protected: bool sendFrame( int delta_us ); public: - EventStream() - { + EventStream() { mode = DEFAULT_MODE; forceEventChange = false; @@ -265,18 +248,15 @@ public: event_data = 0; } - void setStreamStart( int init_event_id, unsigned int init_frame_id=0 ) - { + void setStreamStart( int init_event_id, unsigned int init_frame_id=0 ) { loadInitialEventData( init_event_id, init_frame_id ); loadMonitor( event_data->monitor_id ); } - void setStreamStart( int monitor_id, time_t event_time ) - { + void setStreamStart( int monitor_id, time_t event_time ) { loadInitialEventData( monitor_id, event_time ); loadMonitor( monitor_id ); } - void setStreamMode( StreamMode p_mode ) - { + void setStreamMode( StreamMode p_mode ) { mode = p_mode; } void runStream(); diff --git a/src/zm_ffmpeg.cpp b/src/zm_ffmpeg.cpp index a83c4e65a..3cccdf3e8 100644 --- a/src/zm_ffmpeg.cpp +++ b/src/zm_ffmpeg.cpp @@ -41,40 +41,40 @@ enum _AVPIXELFORMAT GetFFMPEGPixelFormat(unsigned int p_colours, unsigned p_subp switch(p_colours) { case ZM_COLOUR_RGB24: - { - if(p_subpixelorder == ZM_SUBPIX_ORDER_BGR) { - /* BGR subpixel order */ - pf = AV_PIX_FMT_BGR24; - } else { - /* Assume RGB subpixel order */ - pf = 
AV_PIX_FMT_RGB24; - } - break; - } + { + if(p_subpixelorder == ZM_SUBPIX_ORDER_BGR) { + /* BGR subpixel order */ + pf = AV_PIX_FMT_BGR24; + } else { + /* Assume RGB subpixel order */ + pf = AV_PIX_FMT_RGB24; + } + break; + } case ZM_COLOUR_RGB32: - { - if(p_subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - /* ARGB subpixel order */ - pf = AV_PIX_FMT_ARGB; - } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - /* ABGR subpixel order */ - pf = AV_PIX_FMT_ABGR; - } else if(p_subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - /* BGRA subpixel order */ - pf = AV_PIX_FMT_BGRA; - } else { - /* Assume RGBA subpixel order */ - pf = AV_PIX_FMT_RGBA; - } - break; - } + { + if(p_subpixelorder == ZM_SUBPIX_ORDER_ARGB) { + /* ARGB subpixel order */ + pf = AV_PIX_FMT_ARGB; + } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + /* ABGR subpixel order */ + pf = AV_PIX_FMT_ABGR; + } else if(p_subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + /* BGRA subpixel order */ + pf = AV_PIX_FMT_BGRA; + } else { + /* Assume RGBA subpixel order */ + pf = AV_PIX_FMT_RGBA; + } + break; + } case ZM_COLOUR_GRAY8: - pf = AV_PIX_FMT_GRAY8; - break; + pf = AV_PIX_FMT_GRAY8; + break; default: - Panic("Unexpected colours: %d",p_colours); - pf = AV_PIX_FMT_GRAY8; /* Just to shush gcc variable may be unused warning */ - break; + Panic("Unexpected colours: %d",p_colours); + pf = AV_PIX_FMT_GRAY8; /* Just to shush gcc variable may be unused warning */ + break; } return pf; @@ -158,25 +158,17 @@ SWScale::SWScale() : gotdefaults(false), swscale_ctx(NULL), input_avframe(NULL), SWScale::~SWScale() { /* Free up everything */ -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) av_frame_free( &input_avframe ); -#else - av_freep( &input_avframe ); -#endif //input_avframe = NULL; -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) av_frame_free( &output_avframe ); -#else - av_freep( &output_avframe ); -#endif //output_avframe = NULL; if(swscale_ctx) { sws_freeContext(swscale_ctx); swscale_ctx = NULL; } - + Debug(4,"SWScale object 
destroyed"); } @@ -233,13 +225,14 @@ int SWScale::Convert(const uint8_t* in_buffer, const size_t in_buffer_size, uint #else size_t outsize = avpicture_get_size(out_pf, width, height); #endif + if(outsize < out_buffer_size) { Error("The output buffer is undersized for the output format. Required: %d Available: %d", outsize, out_buffer_size); return -5; } /* Get the context */ - swscale_ctx = sws_getCachedContext(swscale_ctx, width, height, in_pf, width, height, out_pf, 0, NULL, NULL, NULL); + swscale_ctx = sws_getCachedContext( swscale_ctx, width, height, in_pf, width, height, out_pf, SWS_FAST_BILINEAR, NULL, NULL, NULL ); if(swscale_ctx == NULL) { Error("Failed getting swscale context"); return -6; @@ -366,6 +359,22 @@ int hacked_up_context2_for_older_ffmpeg(AVFormatContext **avctx, AVOutputFormat } } + if (!oformat) { + if (format) { + oformat = av_guess_format(format, NULL, NULL); + if (!oformat) { + av_log(s, AV_LOG_ERROR, "Requested output format '%s' is not a suitable output format\n", format); + ret = AVERROR(EINVAL); + } + } else { + oformat = av_guess_format(NULL, filename, NULL); + if (!oformat) { + ret = AVERROR(EINVAL); + av_log(s, AV_LOG_ERROR, "Unable to find a suitable output format for '%s'\n", filename); + } + } + } + if (ret) { avformat_free_context(s); return ret; @@ -387,21 +396,21 @@ int hacked_up_context2_for_older_ffmpeg(AVFormatContext **avctx, AVOutputFormat } if (filename) strncpy(s->filename, filename, sizeof(s->filename)); - *avctx = s; - return 0; + *avctx = s; + return 0; } } static void zm_log_fps(double d, const char *postfix) { uint64_t v = lrintf(d * 100); if (!v) { - Debug(3, "%1.4f %s", d, postfix); + Debug(1, "%1.4f %s", d, postfix); } else if (v % 100) { - Debug(3, "%3.2f %s", d, postfix); + Debug(1, "%3.2f %s", d, postfix); } else if (v % (100 * 1000)) { - Debug(3, "%1.0f %s", d, postfix); + Debug(1, "%1.0f %s", d, postfix); } else - Debug(3, "%1.0fk %s", d / 1000, postfix); + Debug(1, "%1.0fk %s", d / 1000, postfix); } /* 
"user interface" functions */ @@ -413,28 +422,27 @@ void zm_dump_stream_format(AVFormatContext *ic, int i, int index, int is_output) AVDictionaryEntry *lang = av_dict_get(st->metadata, "language", NULL, 0); avcodec_string(buf, sizeof(buf), st->codec, is_output); - Debug(3, " Stream #%d:%d", index, i); + Debug(1, " Stream #%d:%d", index, i); /* the pid is an important information, so we display it */ /* XXX: add a generic system */ if (flags & AVFMT_SHOW_IDS) - Debug(3, "[0x%x]", st->id); + Debug(1, "[0x%x]", st->id); if (lang) - Debug(3, "(%s)", lang->value); - av_log(NULL, AV_LOG_DEBUG, ", %d, %d/%d", st->codec_info_nb_frames, - st->time_base.num, st->time_base.den); - Debug(3, ": %s", buf); + Debug(1, "(%s)", lang->value); + Debug(1, ", %d, %d/%d", st->codec_info_nb_frames, st->time_base.num, st->time_base.den); + Debug(1, ": %s", buf); if (st->sample_aspect_ratio.num && // default - av_cmp_q(st->sample_aspect_ratio, st->codec->sample_aspect_ratio)) { + av_cmp_q(st->sample_aspect_ratio, st->codec->sample_aspect_ratio)) { AVRational display_aspect_ratio; av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, - st->codec->width * (int64_t)st->sample_aspect_ratio.num, - st->codec->height * (int64_t)st->sample_aspect_ratio.den, - 1024 * 1024); - Debug(3, ", SAR %d:%d DAR %d:%d", - st->sample_aspect_ratio.num, st->sample_aspect_ratio.den, - display_aspect_ratio.num, display_aspect_ratio.den); + st->codec->width * (int64_t)st->sample_aspect_ratio.num, + st->codec->height * (int64_t)st->sample_aspect_ratio.den, + 1024 * 1024); + Debug(1, ", SAR %d:%d DAR %d:%d", + st->sample_aspect_ratio.num, st->sample_aspect_ratio.den, + display_aspect_ratio.num, display_aspect_ratio.den); } if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) { @@ -448,34 +456,56 @@ void zm_dump_stream_format(AVFormatContext *ic, int i, int index, int is_output) if (fps) zm_log_fps(av_q2d(st->avg_frame_rate), tbn || tbc ? "fps, " : "fps"); if (tbn) - zm_log_fps(1 / av_q2d(st->time_base), tbc ? 
"tbn, " : "tbn"); + zm_log_fps(1 / av_q2d(st->time_base), tbc ? "stream tb numerator , " : "stream tb numerator"); if (tbc) - zm_log_fps(1 / av_q2d(st->codec->time_base), "tbc"); + zm_log_fps(1 / av_q2d(st->codec->time_base), "codec time base:"); } if (st->disposition & AV_DISPOSITION_DEFAULT) - Debug(3, " (default)"); + Debug(1, " (default)"); if (st->disposition & AV_DISPOSITION_DUB) - Debug(3, " (dub)"); + Debug(1, " (dub)"); if (st->disposition & AV_DISPOSITION_ORIGINAL) - Debug(3, " (original)"); + Debug(1, " (original)"); if (st->disposition & AV_DISPOSITION_COMMENT) - Debug(3, " (comment)"); + Debug(1, " (comment)"); if (st->disposition & AV_DISPOSITION_LYRICS) - Debug(3, " (lyrics)"); + Debug(1, " (lyrics)"); if (st->disposition & AV_DISPOSITION_KARAOKE) - Debug(3, " (karaoke)"); + Debug(1, " (karaoke)"); if (st->disposition & AV_DISPOSITION_FORCED) - Debug(3, " (forced)"); + Debug(1, " (forced)"); if (st->disposition & AV_DISPOSITION_HEARING_IMPAIRED) - Debug(3, " (hearing impaired)"); + Debug(1, " (hearing impaired)"); if (st->disposition & AV_DISPOSITION_VISUAL_IMPAIRED) - Debug(3, " (visual impaired)"); + Debug(1, " (visual impaired)"); if (st->disposition & AV_DISPOSITION_CLEAN_EFFECTS) - Debug(3, " (clean effects)"); - Debug(3, "\n"); + Debug(1, " (clean effects)"); + Debug(1, "\n"); //dump_metadata(NULL, st->metadata, " "); //dump_sidedata(NULL, st, " "); } + +int check_sample_fmt(AVCodec *codec, enum AVSampleFormat sample_fmt) { + const enum AVSampleFormat *p = codec->sample_fmts; + + while (*p != AV_SAMPLE_FMT_NONE) { + if (*p == sample_fmt) + return 1; + else Debug(2, "Not %s", av_get_sample_fmt_name( *p ) ); + p++; + } + return 0; +} + +#if LIBAVCODEC_VERSION_CHECK(56, 8, 0, 60, 100) +#else +unsigned int zm_av_packet_ref( AVPacket *dst, AVPacket *src ) { + dst->data = reinterpret_cast(new uint64_t[(src->size + FF_INPUT_BUFFER_PADDING_SIZE)/sizeof(uint64_t) + 1]); + memcpy(dst->data, src->data, src->size ); + return 0; +} +#endif + diff --git 
a/src/zm_ffmpeg.h b/src/zm_ffmpeg.h index 24c6c8872..f94c575f5 100644 --- a/src/zm_ffmpeg.h +++ b/src/zm_ffmpeg.h @@ -41,8 +41,8 @@ extern "C" { * b and c the minor and micro versions of libav * d and e the minor and micro versions of FFmpeg */ #define LIBAVUTIL_VERSION_CHECK(a, b, c, d, e) \ - ( (LIBAVUTIL_VERSION_MICRO < 100 && LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ - (LIBAVUTIL_VERSION_MICRO >= 100 && LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) + ( (LIBAVUTIL_VERSION_MICRO < 100 && LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ + (LIBAVUTIL_VERSION_MICRO >= 100 && LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) #if LIBAVUTIL_VERSION_CHECK(50, 29, 0, 29, 0) #include @@ -59,55 +59,55 @@ extern "C" { #include #include #endif /* HAVE_LIBAVUTIL_AVUTIL_H */ - + #if defined(HAVE_LIBAVUTIL_AVUTIL_H) #if LIBAVUTIL_VERSION_CHECK(51, 42, 0, 74, 100) - #define _AVPIXELFORMAT AVPixelFormat + #define _AVPIXELFORMAT AVPixelFormat #else - #define _AVPIXELFORMAT PixelFormat - #define AV_PIX_FMT_NONE PIX_FMT_NONE - #define AV_PIX_FMT_RGB444 PIX_FMT_RGB444 - #define AV_PIX_FMT_RGB555 PIX_FMT_RGB555 - #define AV_PIX_FMT_RGB565 PIX_FMT_RGB565 - #define AV_PIX_FMT_BGR24 PIX_FMT_BGR24 - #define AV_PIX_FMT_RGB24 PIX_FMT_RGB24 - #define AV_PIX_FMT_BGRA PIX_FMT_BGRA - #define AV_PIX_FMT_ARGB PIX_FMT_ARGB - #define AV_PIX_FMT_ABGR PIX_FMT_ABGR - #define AV_PIX_FMT_RGBA PIX_FMT_RGBA - #define AV_PIX_FMT_GRAY8 PIX_FMT_GRAY8 - #define AV_PIX_FMT_YUYV422 PIX_FMT_YUYV422 - #define AV_PIX_FMT_YUV422P PIX_FMT_YUV422P - #define AV_PIX_FMT_YUV411P PIX_FMT_YUV411P - #define AV_PIX_FMT_YUV444P PIX_FMT_YUV444P - #define AV_PIX_FMT_YUV410P PIX_FMT_YUV410P - #define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P - #define AV_PIX_FMT_YUVJ444P PIX_FMT_YUVJ444P - #define AV_PIX_FMT_UYVY422 PIX_FMT_UYVY422 - #define AV_PIX_FMT_YUVJ420P PIX_FMT_YUVJ420P - #define AV_PIX_FMT_YUVJ422P PIX_FMT_YUVJ422P - #define AV_PIX_FMT_UYVY422 PIX_FMT_UYVY422 - #define AV_PIX_FMT_UYYVYY411 
PIX_FMT_UYYVYY411 - #define AV_PIX_FMT_BGR565 PIX_FMT_BGR565 - #define AV_PIX_FMT_BGR555 PIX_FMT_BGR555 - #define AV_PIX_FMT_BGR8 PIX_FMT_BGR8 - #define AV_PIX_FMT_BGR4 PIX_FMT_BGR4 - #define AV_PIX_FMT_BGR4_BYTE PIX_FMT_BGR4_BYTE - #define AV_PIX_FMT_RGB8 PIX_FMT_RGB8 - #define AV_PIX_FMT_RGB4 PIX_FMT_RGB4 - #define AV_PIX_FMT_RGB4_BYTE PIX_FMT_RGB4_BYTE - #define AV_PIX_FMT_NV12 PIX_FMT_NV12 - #define AV_PIX_FMT_NV21 PIX_FMT_NV21 - #define AV_PIX_FMT_RGB32_1 PIX_FMT_RGB32_1 - #define AV_PIX_FMT_BGR32_1 PIX_FMT_BGR32_1 - #define AV_PIX_FMT_GRAY16BE PIX_FMT_GRAY16BE - #define AV_PIX_FMT_GRAY16LE PIX_FMT_GRAY16LE - #define AV_PIX_FMT_YUV440P PIX_FMT_YUV440P - #define AV_PIX_FMT_YUVJ440P PIX_FMT_YUVJ440P - #define AV_PIX_FMT_YUVA420P PIX_FMT_YUVA420P - //#define AV_PIX_FMT_VDPAU_H264 PIX_FMT_VDPAU_H264 - //#define AV_PIX_FMT_VDPAU_MPEG1 PIX_FMT_VDPAU_MPEG1 - //#define AV_PIX_FMT_VDPAU_MPEG2 PIX_FMT_VDPAU_MPEG2 + #define _AVPIXELFORMAT PixelFormat + #define AV_PIX_FMT_NONE PIX_FMT_NONE + #define AV_PIX_FMT_RGB444 PIX_FMT_RGB444 + #define AV_PIX_FMT_RGB555 PIX_FMT_RGB555 + #define AV_PIX_FMT_RGB565 PIX_FMT_RGB565 + #define AV_PIX_FMT_BGR24 PIX_FMT_BGR24 + #define AV_PIX_FMT_RGB24 PIX_FMT_RGB24 + #define AV_PIX_FMT_BGRA PIX_FMT_BGRA + #define AV_PIX_FMT_ARGB PIX_FMT_ARGB + #define AV_PIX_FMT_ABGR PIX_FMT_ABGR + #define AV_PIX_FMT_RGBA PIX_FMT_RGBA + #define AV_PIX_FMT_GRAY8 PIX_FMT_GRAY8 + #define AV_PIX_FMT_YUYV422 PIX_FMT_YUYV422 + #define AV_PIX_FMT_YUV422P PIX_FMT_YUV422P + #define AV_PIX_FMT_YUV411P PIX_FMT_YUV411P + #define AV_PIX_FMT_YUV444P PIX_FMT_YUV444P + #define AV_PIX_FMT_YUV410P PIX_FMT_YUV410P + #define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P + #define AV_PIX_FMT_YUVJ444P PIX_FMT_YUVJ444P + #define AV_PIX_FMT_UYVY422 PIX_FMT_UYVY422 + #define AV_PIX_FMT_YUVJ420P PIX_FMT_YUVJ420P + #define AV_PIX_FMT_YUVJ422P PIX_FMT_YUVJ422P + #define AV_PIX_FMT_UYVY422 PIX_FMT_UYVY422 + #define AV_PIX_FMT_UYYVYY411 PIX_FMT_UYYVYY411 + #define AV_PIX_FMT_BGR565 PIX_FMT_BGR565 
+ #define AV_PIX_FMT_BGR555 PIX_FMT_BGR555 + #define AV_PIX_FMT_BGR8 PIX_FMT_BGR8 + #define AV_PIX_FMT_BGR4 PIX_FMT_BGR4 + #define AV_PIX_FMT_BGR4_BYTE PIX_FMT_BGR4_BYTE + #define AV_PIX_FMT_RGB8 PIX_FMT_RGB8 + #define AV_PIX_FMT_RGB4 PIX_FMT_RGB4 + #define AV_PIX_FMT_RGB4_BYTE PIX_FMT_RGB4_BYTE + #define AV_PIX_FMT_NV12 PIX_FMT_NV12 + #define AV_PIX_FMT_NV21 PIX_FMT_NV21 + #define AV_PIX_FMT_RGB32_1 PIX_FMT_RGB32_1 + #define AV_PIX_FMT_BGR32_1 PIX_FMT_BGR32_1 + #define AV_PIX_FMT_GRAY16BE PIX_FMT_GRAY16BE + #define AV_PIX_FMT_GRAY16LE PIX_FMT_GRAY16LE + #define AV_PIX_FMT_YUV440P PIX_FMT_YUV440P + #define AV_PIX_FMT_YUVJ440P PIX_FMT_YUVJ440P + #define AV_PIX_FMT_YUVA420P PIX_FMT_YUVA420P + //#define AV_PIX_FMT_VDPAU_H264 PIX_FMT_VDPAU_H264 + //#define AV_PIX_FMT_VDPAU_MPEG1 PIX_FMT_VDPAU_MPEG1 + //#define AV_PIX_FMT_VDPAU_MPEG2 PIX_FMT_VDPAU_MPEG2 #endif #endif /* HAVE_LIBAVUTIL_AVUTIL_H */ @@ -122,8 +122,8 @@ extern "C" { * b and c the minor and micro versions of libav * d and e the minor and micro versions of FFmpeg */ #define LIBAVCODEC_VERSION_CHECK(a, b, c, d, e) \ - ( (LIBAVCODEC_VERSION_MICRO < 100 && LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ - (LIBAVCODEC_VERSION_MICRO >= 100 && LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) + ( (LIBAVCODEC_VERSION_MICRO < 100 && LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ + (LIBAVCODEC_VERSION_MICRO >= 100 && LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) #elif HAVE_FFMPEG_AVCODEC_H #include @@ -131,9 +131,9 @@ extern "C" { #if defined(HAVE_LIBAVCODEC_AVCODEC_H) #if LIBAVCODEC_VERSION_CHECK(54, 25, 0, 51, 100) - #define _AVCODECID AVCodecID + #define _AVCODECID AVCodecID #else - #define _AVCODECID CodecID + #define _AVCODECID CodecID #endif #endif /* HAVE_LIBAVCODEC_AVCODEC_H */ @@ -147,8 +147,8 @@ extern "C" { * b and c the minor and micro versions of libav * d and e the minor and micro versions of FFmpeg */ #define LIBAVFORMAT_VERSION_CHECK(a, b, c, d, e) \ - ( 
(LIBAVFORMAT_VERSION_MICRO < 100 && LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ - (LIBAVFORMAT_VERSION_MICRO >= 100 && LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) + ( (LIBAVFORMAT_VERSION_MICRO < 100 && LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ + (LIBAVFORMAT_VERSION_MICRO >= 100 && LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) #elif HAVE_FFMPEG_AVFORMAT_H #include @@ -163,8 +163,8 @@ extern "C" { * b and c the minor and micro versions of libav * d and e the minor and micro versions of FFmpeg */ #define LIBAVDEVICE_VERSION_CHECK(a, b, c, d, e) \ - ( (LIBAVDEVICE_VERSION_MICRO < 100 && LIBAVDEVICE_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ - (LIBAVDEVICE_VERSION_MICRO >= 100 && LIBAVDEVICE_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) + ( (LIBAVDEVICE_VERSION_MICRO < 100 && LIBAVDEVICE_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ + (LIBAVDEVICE_VERSION_MICRO >= 100 && LIBAVDEVICE_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) #elif HAVE_FFMPEG_AVDEVICE_H #include @@ -179,8 +179,8 @@ extern "C" { * b and c the minor and micro versions of libav * d and e the minor and micro versions of FFmpeg */ #define LIBSWSCALE_VERSION_CHECK(a, b, c, d, e) \ - ( (LIBSWSCALE_VERSION_MICRO < 100 && LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ - (LIBSWSCALE_VERSION_MICRO >= 100 && LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) + ( (LIBSWSCALE_VERSION_MICRO < 100 && LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(a, b, c) ) || \ + (LIBSWSCALE_VERSION_MICRO >= 100 && LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(a, d, e) ) ) #elif HAVE_FFMPEG_SWSCALE_H #include @@ -212,23 +212,23 @@ enum _AVPIXELFORMAT GetFFMPEGPixelFormat(unsigned int p_colours, unsigned p_subp #if HAVE_LIBSWSCALE && HAVE_LIBAVUTIL class SWScale { public: - SWScale(); - ~SWScale(); - int SetDefaults(enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height); - int ConvertDefaults(const Image* img, uint8_t* out_buffer, const 
size_t out_buffer_size); - int ConvertDefaults(const uint8_t* in_buffer, const size_t in_buffer_size, uint8_t* out_buffer, const size_t out_buffer_size); - int Convert(const Image* img, uint8_t* out_buffer, const size_t out_buffer_size, enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height); - int Convert(const uint8_t* in_buffer, const size_t in_buffer_size, uint8_t* out_buffer, const size_t out_buffer_size, enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height); + SWScale(); + ~SWScale(); + int SetDefaults(enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height); + int ConvertDefaults(const Image* img, uint8_t* out_buffer, const size_t out_buffer_size); + int ConvertDefaults(const uint8_t* in_buffer, const size_t in_buffer_size, uint8_t* out_buffer, const size_t out_buffer_size); + int Convert(const Image* img, uint8_t* out_buffer, const size_t out_buffer_size, enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height); + int Convert(const uint8_t* in_buffer, const size_t in_buffer_size, uint8_t* out_buffer, const size_t out_buffer_size, enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height); protected: - bool gotdefaults; - struct SwsContext* swscale_ctx; - AVFrame* input_avframe; - AVFrame* output_avframe; - enum _AVPIXELFORMAT default_input_pf; - enum _AVPIXELFORMAT default_output_pf; - unsigned int default_width; - unsigned int default_height; + bool gotdefaults; + struct SwsContext* swscale_ctx; + AVFrame* input_avframe; + AVFrame* output_avframe; + enum _AVPIXELFORMAT default_input_pf; + enum _AVPIXELFORMAT default_output_pf; + unsigned int default_width; + unsigned int default_height; }; #endif // HAVE_LIBSWSCALE && HAVE_LIBAVUTIL @@ -265,19 +265,19 @@ protected: */ #ifdef __cplusplus - inline static const std::string av_make_error_string(int errnum) - { - char 
errbuf[AV_ERROR_MAX_STRING_SIZE]; + inline static const std::string av_make_error_string(int errnum) + { + char errbuf[AV_ERROR_MAX_STRING_SIZE]; #if LIBAVUTIL_VERSION_CHECK(50, 13, 0, 13, 0) - av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE); + av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE); #else - snprintf(errbuf, AV_ERROR_MAX_STRING_SIZE, "libav error %d", errnum); + snprintf(errbuf, AV_ERROR_MAX_STRING_SIZE, "libav error %d", errnum); #endif - return (std::string)errbuf; - } + return (std::string)errbuf; + } - #undef av_err2str - #define av_err2str(errnum) av_make_error_string(errnum).c_str() + #undef av_err2str + #define av_err2str(errnum) av_make_error_string(errnum).c_str() /* The following is copied directly from newer ffmpeg */ #if LIBAVUTIL_VERSION_CHECK(52, 7, 0, 17, 100) @@ -323,5 +323,29 @@ static av_always_inline av_const int64_t av_clip64_c(int64_t a, int64_t amin, in #endif void zm_dump_stream_format(AVFormatContext *ic, int i, int index, int is_output); +#if LIBAVCODEC_VERSION_CHECK(56, 8, 0, 60, 100) + #define zm_av_packet_unref( packet ) av_packet_unref( packet ) + #define zm_av_packet_ref( dst, src ) av_packet_ref( dst, src ) +#else + #define zm_av_packet_unref( packet ) av_free_packet( packet ) +unsigned int zm_av_packet_ref( AVPacket *dst, AVPacket *src ); +#endif +#if LIBAVCODEC_VERSION_CHECK(52, 23, 0, 23, 0) + #define zm_avcodec_decode_video( context, rawFrame, frameComplete, packet ) avcodec_decode_video2( context, rawFrame, frameComplete, packet ) +#else + #define zm_avcodec_decode_video(context, rawFrame, frameComplete, packet ) avcodec_decode_video( context, rawFrame, frameComplete, packet->data, packet->size) +#endif + +#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) + #define zm_av_frame_alloc() av_frame_alloc() +#else + #define zm_av_frame_alloc() avcodec_alloc_frame() +#endif + +#if ! 
LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) + #define av_frame_free( input_avframe ) av_freep( input_avframe ) +#endif + +int check_sample_fmt(AVCodec *codec, enum AVSampleFormat sample_fmt); #endif // ZM_FFMPEG_H diff --git a/src/zm_ffmpeg_camera.cpp b/src/zm_ffmpeg_camera.cpp index 743da0f0c..4c7f4cff0 100644 --- a/src/zm_ffmpeg_camera.cpp +++ b/src/zm_ffmpeg_camera.cpp @@ -42,16 +42,17 @@ FfmpegCamera::FfmpegCamera( int p_id, const std::string &p_path, const std::stri mMethod( p_method ), mOptions( p_options ) { - if ( capture ) - { + if ( capture ) { Initialise(); } - + mFormatContext = NULL; mVideoStreamId = -1; mAudioStreamId = -1; - mCodecContext = NULL; - mCodec = NULL; + mVideoCodecContext = NULL; + mAudioCodecContext = NULL; + mVideoCodec = NULL; + mAudioCodec = NULL; mRawFrame = NULL; mFrame = NULL; frameCount = 0; @@ -60,9 +61,9 @@ FfmpegCamera::FfmpegCamera( int p_id, const std::string &p_path, const std::stri mCanCapture = false; mOpenStart = 0; mReopenThread = 0; - wasRecording = false; videoStore = NULL; - + video_last_pts = 0; + #if HAVE_LIBSWSCALE mConvertContext = NULL; #endif @@ -79,35 +80,35 @@ FfmpegCamera::FfmpegCamera( int p_id, const std::string &p_path, const std::stri } else { Panic("Unexpected colours: %d",colours); } - + } -FfmpegCamera::~FfmpegCamera() -{ +FfmpegCamera::~FfmpegCamera() { + + if ( videoStore ) { + delete videoStore; + } CloseFfmpeg(); - if ( capture ) - { + if ( capture ) { Terminate(); } } -void FfmpegCamera::Initialise() -{ +void FfmpegCamera::Initialise() { if ( logDebugging() ) av_log_set_level( AV_LOG_DEBUG ); else av_log_set_level( AV_LOG_QUIET ); av_register_all(); + avformat_network_init(); } -void FfmpegCamera::Terminate() -{ +void FfmpegCamera::Terminate() { } -int FfmpegCamera::PrimeCapture() -{ +int FfmpegCamera::PrimeCapture() { mVideoStreamId = -1; mAudioStreamId = -1; Info( "Priming capture from %s", mPath.c_str() ); @@ -129,107 +130,101 @@ int FfmpegCamera::Capture( Image &image ) if (!mCanCapture){ 
return -1; } - + // If the reopen thread has a value, but mCanCapture != 0, then we have just reopened the connection to the ffmpeg device, and we can clean up the thread. if (mReopenThread != 0) { void *retval = 0; int ret; - + ret = pthread_join(mReopenThread, &retval); if (ret != 0){ Error("Could not join reopen thread."); } - + Info( "Successfully reopened stream." ); mReopenThread = 0; } - AVPacket packet; - uint8_t* directbuffer; - - /* Request a writeable buffer of the target image */ - directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); - if(directbuffer == NULL) { - Error("Failed requesting writeable buffer for the captured image."); - return (-1); - } - int frameComplete = false; - while ( !frameComplete ) - { + while ( !frameComplete ) { int avResult = av_read_frame( mFormatContext, &packet ); - if ( avResult < 0 ) - { + if ( avResult < 0 ) { char errbuf[AV_ERROR_MAX_STRING_SIZE]; av_strerror(avResult, errbuf, AV_ERROR_MAX_STRING_SIZE); if ( - // Check if EOF. - (avResult == AVERROR_EOF || (mFormatContext->pb && mFormatContext->pb->eof_reached)) || - // Check for Connection failure. - (avResult == -110) - ) - { - Info( "av_read_frame returned \"%s\". Reopening stream.", errbuf); + // Check if EOF. + (avResult == AVERROR_EOF || (mFormatContext->pb && mFormatContext->pb->eof_reached)) || + // Check for Connection failure. + (avResult == -110) + ) { + Info( "av_read_frame returned \"%s\". Reopening stream.", errbuf ); ReopenFfmpeg(); } Error( "Unable to read packet from stream %d: error %d \"%s\".", packet.stream_index, avResult, errbuf ); return( -1 ); } - Debug( 5, "Got packet from stream %d", packet.stream_index ); + Debug( 5, "Got packet from stream %d dts (%d) pts(%d)", packet.stream_index, packet.pts, packet.dts ); // What about audio stream? Maybe someday we could do sound detection... 
- if ( packet.stream_index == mVideoStreamId ) - { + if ( packet.stream_index == mVideoStreamId ) { #if LIBAVCODEC_VERSION_CHECK(52, 23, 0, 23, 0) - if ( avcodec_decode_video2( mCodecContext, mRawFrame, &frameComplete, &packet ) < 0 ) + if (avcodec_decode_video2(mVideoCodecContext, mRawFrame, &frameComplete, &packet) < 0) #else - if ( avcodec_decode_video( mCodecContext, mRawFrame, &frameComplete, packet.data, packet.size ) < 0 ) + if (avcodec_decode_video(mVideoCodecContext, mRawFrame, &frameComplete, packet.data, packet.size) < 0) #endif Fatal( "Unable to decode frame at frame %d", frameCount ); Debug( 4, "Decoded video packet at frame %d", frameCount ); - if ( frameComplete ) - { - Debug( 3, "Got frame %d", frameCount ); + if ( frameComplete ) { + Debug( 4, "Got frame %d", frameCount ); + + uint8_t* directbuffer; + + /* Request a writeable buffer of the target image */ + directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); + if(directbuffer == NULL) { + Error("Failed requesting writeable buffer for the captured image."); + return (-1); + } + #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0) av_image_fill_arrays(mFrame->data, mFrame->linesize, - directbuffer, imagePixFormat, width, height, 1); + directbuffer, imagePixFormat, width, height, 1); #else avpicture_fill( (AVPicture *)mFrame, directbuffer, - imagePixFormat, width, height); + imagePixFormat, width, height); #endif - -#if HAVE_LIBSWSCALE - if(mConvertContext == NULL) { - mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL ); - if(mConvertContext == NULL) - Fatal( "Unable to create conversion context for %s", mPath.c_str() ); - } - - if ( sws_scale( mConvertContext, mRawFrame->data, mRawFrame->linesize, 0, mCodecContext->height, mFrame->data, mFrame->linesize ) < 0 ) - Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, 
frameCount ); +#if HAVE_LIBSWSCALE + if(mConvertContext == NULL) { + mConvertContext = sws_getContext(mVideoCodecContext->width, + mVideoCodecContext->height, + mVideoCodecContext->pix_fmt, + width, height, imagePixFormat, + SWS_BICUBIC, NULL, NULL, NULL); + + if(mConvertContext == NULL) + Fatal( "Unable to create conversion context for %s", mPath.c_str() ); + } + + if (sws_scale(mConvertContext, mRawFrame->data, mRawFrame->linesize, 0, mVideoCodecContext->height, mFrame->data, mFrame->linesize) < 0) + Fatal("Unable to convert raw format %u to target format %u at frame %d", mVideoCodecContext->pix_fmt, imagePixFormat, frameCount); #else // HAVE_LIBSWSCALE - Fatal( "You must compile ffmpeg with the --enable-swscale option to use ffmpeg cameras" ); + Fatal( "You must compile ffmpeg with the --enable-swscale option to use ffmpeg cameras" ); #endif // HAVE_LIBSWSCALE - + frameCount++; - } // end if frameComplete - } else { - Debug( 4, "Different stream_index %d", packet.stream_index ); - } // end if packet.stream_index == mVideoStreamId -#if LIBAVCODEC_VERSION_CHECK(57, 8, 0, 12, 100) - av_packet_unref( &packet); -#else - av_free_packet( &packet ); -#endif + } // end if frameComplete + } else { + Debug( 4, "Different stream_index %d", packet.stream_index ); + } // end if packet.stream_index == mVideoStreamId + zm_av_packet_unref( &packet ); } // end while ! frameComplete return (0); } // FfmpegCamera::Capture -int FfmpegCamera::PostCapture() -{ +int FfmpegCamera::PostCapture() { // Nothing to do here return( 0 ); } @@ -291,8 +286,7 @@ int FfmpegCamera::OpenFfmpeg() { Debug ( 1, "Opened input" ); Info( "Stream open %s", mPath.c_str() ); - startTime=av_gettime();//FIXME here or after find_Stream_info - + //FIXME can speed up initial analysis but need sensible parameters... 
//mFormatContext->probesize = 32; //mFormatContext->max_analyze_duration = 32; @@ -301,24 +295,24 @@ int FfmpegCamera::OpenFfmpeg() { Debug ( 1, "Calling av_find_stream_info" ); if ( av_find_stream_info( mFormatContext ) < 0 ) #else - Debug ( 1, "Calling avformat_find_stream_info" ); + Debug ( 1, "Calling avformat_find_stream_info" ); if ( avformat_find_stream_info( mFormatContext, 0 ) < 0 ) #endif Fatal( "Unable to find stream info from %s due to: %s", mPath.c_str(), strerror(errno) ); + startTime = av_gettime();//FIXME here or after find_Stream_info Debug ( 1, "Got stream info" ); // Find first video stream present + // The one we want Might not be the first mVideoStreamId = -1; mAudioStreamId = -1; - for (unsigned int i=0; i < mFormatContext->nb_streams; i++ ) - { + for (unsigned int i=0; i < mFormatContext->nb_streams; i++ ) { #if (LIBAVCODEC_VERSION_CHECK(52, 64, 0, 64, 0) || LIBAVUTIL_VERSION_CHECK(50, 14, 0, 14, 0)) - if ( mFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO ) + if ( mFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO ) { #else - if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO ) + if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO ) { #endif - { if ( mVideoStreamId == -1 ) { mVideoStreamId = i; // if we break, then we won't find the audio stream @@ -328,59 +322,74 @@ int FfmpegCamera::OpenFfmpeg() { } } #if (LIBAVCODEC_VERSION_CHECK(52, 64, 0, 64, 0) || LIBAVUTIL_VERSION_CHECK(50, 14, 0, 14, 0)) - if ( mFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO ) + if ( mFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO ) { #else - if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO ) + if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO ) { #endif - { if ( mAudioStreamId == -1 ) { mAudioStreamId = i; } else { Debug(2, "Have another audio stream." 
); } - } - } + } // end foreach stream if ( mVideoStreamId == -1 ) Fatal( "Unable to locate video stream in %s", mPath.c_str() ); if ( mAudioStreamId == -1 ) Debug( 3, "Unable to locate audio stream in %s", mPath.c_str() ); - Debug ( 1, "Found video stream" ); + Debug ( 3, "Found video stream at index %d", mVideoStreamId ); + Debug ( 3, "Found audio stream at index %d", mAudioStreamId ); - mCodecContext = mFormatContext->streams[mVideoStreamId]->codec; + mVideoCodecContext = mFormatContext->streams[mVideoStreamId]->codec; + // STolen from ispy + //this fixes issues with rtsp streams!! woot. + //mVideoCodecContext->flags2 |= CODEC_FLAG2_FAST | CODEC_FLAG2_CHUNKS | CODEC_FLAG_LOW_DELAY; // Enable faster H264 decode. + mVideoCodecContext->flags2 |= CODEC_FLAG2_FAST | CODEC_FLAG_LOW_DELAY; // Try and get the codec from the codec context - if ( (mCodec = avcodec_find_decoder( mCodecContext->codec_id )) == NULL ) - Fatal( "Can't find codec for video stream from %s", mPath.c_str() ); - - Debug ( 1, "Found decoder" ); - + if ((mVideoCodec = avcodec_find_decoder(mVideoCodecContext->codec_id)) == NULL) { + Fatal("Can't find codec for video stream from %s", mPath.c_str()); + } else { + Debug(1, "Video Found decoder"); + zm_dump_stream_format(mFormatContext, mVideoStreamId, 0, 0); // Open the codec #if !LIBAVFORMAT_VERSION_CHECK(53, 8, 0, 8, 0) Debug ( 1, "Calling avcodec_open" ); - if ( avcodec_open( mCodecContext, mCodec ) < 0 ) + if (avcodec_open(mVideoCodecContext, mVideoCodec) < 0) #else - Debug ( 1, "Calling avcodec_open2" ); - if ( avcodec_open2( mCodecContext, mCodec, 0 ) < 0 ) + Debug ( 1, "Calling avcodec_open2" ); + if (avcodec_open2(mVideoCodecContext, mVideoCodec, 0) < 0) #endif Fatal( "Unable to open codec for video stream from %s", mPath.c_str() ); + } + + if (mAudioStreamId >= 0) { + mAudioCodecContext = mFormatContext->streams[mAudioStreamId]->codec; + if ((mAudioCodec = avcodec_find_decoder(mAudioCodecContext->codec_id)) == NULL) { + Debug(1, "Can't find 
codec for audio stream from %s", mPath.c_str()); + } else { + Debug(1, "Audio Found decoder"); + zm_dump_stream_format(mFormatContext, mAudioStreamId, 0, 0); + // Open the codec +#if !LIBAVFORMAT_VERSION_CHECK(53, 8, 0, 8, 0) + Debug ( 1, "Calling avcodec_open" ); + if (avcodec_open(mAudioCodecContext, mAudioCodec) < 0) +#else + Debug ( 1, "Calling avcodec_open2" ); + if (avcodec_open2(mAudioCodecContext, mAudioCodec, 0) < 0) +#endif + Fatal( "Unable to open codec for video stream from %s", mPath.c_str() ); + } + } Debug ( 1, "Opened codec" ); // Allocate space for the native video frame -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) - mRawFrame = av_frame_alloc(); -#else - mRawFrame = avcodec_alloc_frame(); -#endif + mRawFrame = zm_av_frame_alloc(); // Allocate space for the converted video frame -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) - mFrame = av_frame_alloc(); -#else - mFrame = avcodec_alloc_frame(); -#endif + mFrame = zm_av_frame_alloc(); if(mRawFrame == NULL || mFrame == NULL) Fatal( "Unable to allocate frame for %s", mPath.c_str() ); @@ -398,21 +407,33 @@ int FfmpegCamera::OpenFfmpeg() { } Debug ( 1, "Validated imagesize" ); - + #if HAVE_LIBSWSCALE Debug ( 1, "Calling sws_isSupportedInput" ); - if(!sws_isSupportedInput(mCodecContext->pix_fmt)) { - Fatal("swscale does not support the codec format: %c%c%c%c",(mCodecContext->pix_fmt)&0xff,((mCodecContext->pix_fmt>>8)&0xff),((mCodecContext->pix_fmt>>16)&0xff),((mCodecContext->pix_fmt>>24)&0xff)); + if (!sws_isSupportedInput(mVideoCodecContext->pix_fmt)) { + Fatal("swscale does not support the codec format: %c%c%c%c", (mVideoCodecContext->pix_fmt)&0xff, ((mVideoCodecContext->pix_fmt >> 8)&0xff), ((mVideoCodecContext->pix_fmt >> 16)&0xff), ((mVideoCodecContext->pix_fmt >> 24)&0xff)); } - + if(!sws_isSupportedOutput(imagePixFormat)) { Fatal("swscale does not support the target format: %c%c%c%c",(imagePixFormat)&0xff,((imagePixFormat>>8)&0xff),((imagePixFormat>>16)&0xff),((imagePixFormat>>24)&0xff)); } 
- + + mConvertContext = sws_getContext(mVideoCodecContext->width, + mVideoCodecContext->height, + mVideoCodecContext->pix_fmt, + width, height, + imagePixFormat, SWS_BICUBIC, NULL, + NULL, NULL); + if ( mConvertContext == NULL ) + Fatal( "Unable to create conversion context for %s", mPath.c_str() ); #else // HAVE_LIBSWSCALE Fatal( "You must compile ffmpeg with the --enable-swscale option to use ffmpeg cameras" ); #endif // HAVE_LIBSWSCALE + if ( (unsigned int)mVideoCodecContext->width != width || (unsigned int)mVideoCodecContext->height != height ) { + Warning( "Monitor dimensions are %dx%d but camera is sending %dx%d", width, height, mVideoCodecContext->width, mVideoCodecContext->height ); + } + mCanCapture = true; return 0; @@ -437,14 +458,9 @@ int FfmpegCamera::CloseFfmpeg(){ mCanCapture = false; -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) av_frame_free( &mFrame ); av_frame_free( &mRawFrame ); -#else - av_freep( &mFrame ); - av_freep( &mRawFrame ); -#endif - + #if HAVE_LIBSWSCALE if ( mConvertContext ) { @@ -453,13 +469,16 @@ int FfmpegCamera::CloseFfmpeg(){ } #endif - if ( mCodecContext ) - { - avcodec_close( mCodecContext ); - mCodecContext = NULL; // Freed by av_close_input_file + if (mVideoCodecContext) { + avcodec_close(mVideoCodecContext); + mVideoCodecContext = NULL; // Freed by av_close_input_file } - if ( mFormatContext ) - { + if (mAudioCodecContext) { + avcodec_close(mAudioCodecContext); + mAudioCodecContext = NULL; // Freed by av_close_input_file + } + + if ( mFormatContext ) { #if !LIBAVFORMAT_VERSION_CHECK(53, 17, 0, 25, 0) av_close_input_file( mFormatContext ); #else @@ -509,211 +528,274 @@ void *FfmpegCamera::ReopenFfmpegThreadCallback(void *ctx){ } //Function to handle capture and store -int FfmpegCamera::CaptureAndRecord( Image &image, bool recording, char* event_file ){ +int FfmpegCamera::CaptureAndRecord( Image &image, timeval recording, char* event_file ) { if (!mCanCapture){ return -1; } + int ret; + static char 
errbuf[AV_ERROR_MAX_STRING_SIZE]; // If the reopen thread has a value, but mCanCapture != 0, then we have just reopened the connection to the ffmpeg device, and we can clean up the thread. if (mReopenThread != 0) { void *retval = 0; - int ret; - + ret = pthread_join(mReopenThread, &retval); if (ret != 0){ Error("Could not join reopen thread."); } - + Info( "Successfully reopened stream." ); mReopenThread = 0; } - AVPacket packet; - uint8_t* directbuffer; - - /* Request a writeable buffer of the target image */ - directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); - if( directbuffer == NULL ) { - Error("Failed requesting writeable buffer for the captured image."); - return (-1); - } - - if ( mCodecContext->codec_id != AV_CODEC_ID_H264 ) { + if (mVideoCodecContext->codec_id != AV_CODEC_ID_H264) { Error( "Input stream is not h264. The stored event file may not be viewable in browser." ); } int frameComplete = false; - while ( !frameComplete ) { - int avResult = av_read_frame( mFormatContext, &packet ); - if ( avResult < 0 ) { - char errbuf[AV_ERROR_MAX_STRING_SIZE]; - av_strerror(avResult, errbuf, AV_ERROR_MAX_STRING_SIZE); + while ( ! frameComplete ) { + av_init_packet( &packet ); + + ret = av_read_frame( mFormatContext, &packet ); + if ( ret < 0 ) { + av_strerror( ret, errbuf, AV_ERROR_MAX_STRING_SIZE ); if ( // Check if EOF. - (avResult == AVERROR_EOF || (mFormatContext->pb && mFormatContext->pb->eof_reached)) || + (ret == AVERROR_EOF || (mFormatContext->pb && mFormatContext->pb->eof_reached)) || // Check for Connection failure. - (avResult == -110) - ) { + (ret == -110) + ) { Info( "av_read_frame returned \"%s\". 
Reopening stream.", errbuf); ReopenFfmpeg(); } - Error( "Unable to read packet from stream %d: error %d \"%s\".", packet.stream_index, avResult, errbuf ); + Error( "Unable to read packet from stream %d: error %d \"%s\".", packet.stream_index, ret, errbuf ); return( -1 ); } - Debug( 5, "Got packet from stream %d", packet.stream_index ); - if ( packet.stream_index == mVideoStreamId ) { -#if LIBAVCODEC_VERSION_CHECK(52, 23, 0, 23, 0) - if ( avcodec_decode_video2( mCodecContext, mRawFrame, &frameComplete, &packet ) < 0 ) -#else - if ( avcodec_decode_video( mCodecContext, mRawFrame, &frameComplete, packet.data, packet.size ) < 0 ) + + int key_frame = packet.flags & AV_PKT_FLAG_KEY; + + Debug( 4, "Got packet from stream %d packet pts (%d) dts(%d), key?(%d)", + packet.stream_index, packet.pts, packet.dts, + key_frame + ); + + //Video recording + if ( recording.tv_sec ) { + // The directory we are recording to is no longer tied to the current event. + // Need to re-init the videostore with the correct directory and start recording again + // for efficiency's sake, we should test for keyframe before we test for directory change... + if ( videoStore && key_frame && (strcmp(oldDirectory, event_file) != 0 ) ) { + // don't open new videostore until we're on a key frame..would this require an offset adjustment for the event as a result?... + // if we store our key frame location with the event will that be enough? + Info("Re-starting video storage module"); + + // I don't know if this is important or not... but I figure we might as well write this last packet out to the store before closing it. + // Also don't know how much it matters for audio. 
+ if ( packet.stream_index == mVideoStreamId ) { + //Write the packet to our video store + int ret = videoStore->writeVideoFramePacket( &packet ); + if ( ret < 0 ) { //Less than zero and we skipped a frame + Warning("Error writing last packet to videostore."); + } + } // end if video + + delete videoStore; + videoStore = NULL; + } // end if end of recording + + if ( ( ! videoStore ) && key_frame && ( packet.stream_index == mVideoStreamId ) ) { + //Instantiate the video storage module + + if (record_audio) { + if (mAudioStreamId == -1) { + Debug(3, "Record Audio on but no audio stream found"); + videoStore = new VideoStore((const char *) event_file, "mp4", + mFormatContext->streams[mVideoStreamId], + NULL, + startTime, + this->getMonitor()); + + } else { + Debug(3, "Video module initiated with audio stream"); + videoStore = new VideoStore((const char *) event_file, "mp4", + mFormatContext->streams[mVideoStreamId], + mFormatContext->streams[mAudioStreamId], + startTime, + this->getMonitor()); + } + } else { + Debug(3, "Record_audio is false so exclude audio stream"); + videoStore = new VideoStore((const char *) event_file, "mp4", + mFormatContext->streams[mVideoStreamId], + NULL, + startTime, + this->getMonitor()); + } // end if record_audio + strcpy(oldDirectory, event_file); + + // Need to write out all the frames from the last keyframe? + // No... need to write out all frames from when the event began. Due to PreEventFrames, this could be more than since the last keyframe. 
+ unsigned int packet_count = 0; + ZMPacket *queued_packet; + + packetqueue.clear_unwanted_packets( &recording, mVideoStreamId ); + + while ( ( queued_packet = packetqueue.popPacket() ) ) { + AVPacket *avp = queued_packet->av_packet(); + + packet_count += 1; + //Write the packet to our video store + Debug(2, "Writing queued packet stream: %d KEY %d, remaining (%d)", avp->stream_index, avp->flags & AV_PKT_FLAG_KEY, packetqueue.size() ); + if ( avp->stream_index == mVideoStreamId ) { + ret = videoStore->writeVideoFramePacket( avp ); + } else if ( avp->stream_index == mAudioStreamId ) { + ret = videoStore->writeAudioFramePacket( avp ); + } else { + Warning("Unknown stream id in queued packet (%d)", avp->stream_index ); + ret = -1; + } + if ( ret < 0 ) { + //Less than zero and we skipped a frame + } + delete queued_packet; + } // end while packets in the packetqueue + Debug(2, "Wrote %d queued packets", packet_count ); + } // end if ! was recording + + } else { + // Not recording + if ( videoStore ) { + Info("Deleting videoStore instance"); + delete videoStore; + videoStore = NULL; + } + + // Buffer video packets, since we are not recording. + // All audio packets are keyframes, so only if it's a video keyframe + if ( packet.stream_index == mVideoStreamId) { + if ( key_frame ) { + Debug(3, "Clearing queue"); + packetqueue.clearQueue( monitor->GetPreEventCount(), mVideoStreamId ); + } +#if 0 +// Not sure this is valid. While a camera will PROBABLY always have an increasing pts... it doesn't have to. +// Also, I think there are integer wrap-around issues. 
+ +else if ( packet.pts && video_last_pts > packet.pts ) { + Warning( "Clearing queue due to out of order pts packet.pts(%d) < video_last_pts(%d)"); + packetqueue.clearQueue(); + } #endif - { - Error( "Unable to decode frame at frame %d, continuing...", frameCount ); - av_free_packet( &packet ); + } + + if ( + ( packet.stream_index != mAudioStreamId || record_audio ) + && + ( key_frame || packetqueue.size() ) + ) { + packetqueue.queuePacket( &packet ); + } + } // end if recording or not + + if ( packet.stream_index == mVideoStreamId ) { + if ( videoStore ) { + //Write the packet to our video store + int ret = videoStore->writeVideoFramePacket( &packet ); + if ( ret < 0 ) { //Less than zero and we skipped a frame + zm_av_packet_unref( &packet ); + return 0; + } + } + Debug(4, "about to decode video" ); + +#if LIBAVCODEC_VERSION_CHECK(58, 0, 0, 0, 0) + ret = avcodec_send_packet( mVideoCodecContext, &packet ); + if ( ret < 0 ) { + av_strerror( ret, errbuf, AV_ERROR_MAX_STRING_SIZE ); + Error( "Unable to send packet at frame %d: %s, continuing", frameCount, errbuf ); + zm_av_packet_unref( &packet ); continue; } + ret = avcodec_receive_frame( mVideoCodecContext, mRawFrame ); + if ( ret < 0 ) { + av_strerror( ret, errbuf, AV_ERROR_MAX_STRING_SIZE ); + Error( "Unable to send packet at frame %d: %s, continuing", frameCount, errbuf ); + zm_av_packet_unref( &packet ); + continue; + } + frameComplete = 1; +# else + ret = zm_avcodec_decode_video( mVideoCodecContext, mRawFrame, &frameComplete, &packet ); + if ( ret < 0 ) { + av_strerror( ret, errbuf, AV_ERROR_MAX_STRING_SIZE ); + Error( "Unable to decode frame at frame %d: %s, continuing", frameCount, errbuf ); + zm_av_packet_unref( &packet ); + continue; + } +#endif - Debug( 4, "Decoded video packet at frame %d", frameCount ); + Debug( 4, "Decoded video packet at frame %d", frameCount ); - if ( frameComplete ) { - Debug( 3, "Got frame %d", frameCount ); - - avpicture_fill( (AVPicture *)mFrame, directbuffer, imagePixFormat, 
width, height); + if ( frameComplete ) { + Debug( 4, "Got frame %d", frameCount ); - //Keep the last keyframe so we can establish immediate video - /*if(packet.flags & AV_PKT_FLAG_KEY) - av_copy_packet(&lastKeyframePkt, &packet);*/ - //TODO I think we need to store the key frame location for seeking as part of the event - - //Video recording - if ( recording && !wasRecording ) { - //Instantiate the video storage module + uint8_t* directbuffer; - if (record_audio) { - if (mAudioStreamId == -1) { - Debug(3, "Record Audio on but no audio stream found"); - videoStore = new VideoStore((const char *) event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - NULL, - startTime, - this->getMonitor()->getOrientation()); - - } else { - Debug(3, "Video module initiated with audio stream"); - videoStore = new VideoStore((const char *) event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - mFormatContext->streams[mAudioStreamId], - startTime, - this->getMonitor()->getOrientation()); - } - } else { - Debug(3, "Record_audio is false so exclude audio stream"); - videoStore = new VideoStore((const char *) event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - NULL, - startTime, - this->getMonitor()->getOrientation()); - } - wasRecording = true; - strcpy(oldDirectory, event_file); - - } else if ( ( ! recording ) && wasRecording && videoStore ) { - Info("Deleting videoStore instance"); - delete videoStore; - videoStore = NULL; - } - - // The directory we are recording to is no longer tied to the current - // event. Need to re-init the videostore with the correct directory and - // start recording again - if (recording && wasRecording && (strcmp(oldDirectory, event_file) != 0) - && (packet.flags & AV_PKT_FLAG_KEY)) { - // Don't open new videostore until we're on a key frame..would this - // require an offset adjustment for the event as a result?...if we store - // our key frame location with the event will that be enough? 
- Info("Re-starting video storage module"); - if(videoStore){ - delete videoStore; - videoStore = NULL; - } - - if (record_audio) { - if (mAudioStreamId == -1) { - Debug(3, "Record Audio on but no audio stream found"); - videoStore = new VideoStore((const char *) event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - NULL, - startTime, - this->getMonitor()->getOrientation()); - } else { - Debug(3, "Video module initiated with audio stream"); - videoStore = new VideoStore((const char *) event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - mFormatContext->streams[mAudioStreamId], - startTime, - this->getMonitor()->getOrientation()); - } - } else { - Debug(3, "Record_audio is false so exclude audio stream"); - videoStore = new VideoStore((const char *) event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - NULL, startTime, - this->getMonitor()->getOrientation()); - } - strcpy(oldDirectory, event_file); - } - - if ( videoStore && recording ) { - //Write the packet to our video store - int ret = videoStore->writeVideoFramePacket(&packet, - mFormatContext->streams[mVideoStreamId]); //, &lastKeyframePkt); - if(ret<0){//Less than zero and we skipped a frame - av_free_packet( &packet ); - return 0; - } - } - -#if HAVE_LIBSWSCALE - if ( mConvertContext == NULL ) { - mConvertContext = sws_getContext(mCodecContext->width, - mCodecContext->height, - mCodecContext->pix_fmt, - width, height, - imagePixFormat, SWS_BICUBIC, NULL, - NULL, NULL); - if ( mConvertContext == NULL ) - Fatal( "Unable to create conversion context for %s", mPath.c_str() ); + /* Request a writeable buffer of the target image */ + directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); + if ( directbuffer == NULL ) { + Error("Failed requesting writeable buffer for the captured image."); + zm_av_packet_unref( &packet ); + return (-1); } +#if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0) + av_image_fill_arrays(mFrame->data, mFrame->linesize, directbuffer, imagePixFormat, 
width, height, 1); +#else + avpicture_fill( (AVPicture *)mFrame, directbuffer, imagePixFormat, width, height); +#endif + if (sws_scale(mConvertContext, mRawFrame->data, mRawFrame->linesize, - 0, mCodecContext->height, mFrame->data, mFrame->linesize) < 0) + 0, mVideoCodecContext->height, mFrame->data, mFrame->linesize) < 0) { Fatal("Unable to convert raw format %u to target format %u at frame %d", - mCodecContext->pix_fmt, imagePixFormat, frameCount); -#else // HAVE_LIBSWSCALE - Fatal( "You must compile ffmpeg with the --enable-swscale option to use ffmpeg cameras" ); -#endif // HAVE_LIBSWSCALE + mVideoCodecContext->pix_fmt, imagePixFormat, frameCount); + } - frameCount++; - } // end if frameComplete - } else if ( packet.stream_index == mAudioStreamId ) { //FIXME best way to copy all other streams - if ( videoStore && recording ) { - if ( record_audio ) { - Debug(4, "Recording audio packet" ); - //Write the packet to our video store - int ret = videoStore->writeAudioFramePacket(&packet, - mFormatContext->streams[packet.stream_index]); //FIXME no relevance of last key frame - if ( ret < 0 ) {//Less than zero and we skipped a frame - av_free_packet( &packet ); - return 0; - } - } else { - Debug(4, "Not recording audio packet" ); + frameCount++; + } else { + Debug( 3, "Not framecomplete after av_read_frame" ); + } // end if frameComplete + } else if ( packet.stream_index == mAudioStreamId ) { //FIXME best way to copy all other streams + if ( videoStore ) { + if ( record_audio ) { + Debug(3, "Recording audio packet streamindex(%d) packetstreamindex(%d)", mAudioStreamId, packet.stream_index ); + //Write the packet to our video store + //FIXME no relevance of last key frame + int ret = videoStore->writeAudioFramePacket( &packet ); + if ( ret < 0 ) {//Less than zero and we skipped a frame + Warning("Failure to write audio packet."); + zm_av_packet_unref( &packet ); + return 0; } + } else { + Debug(4, "Not recording audio packet" ); } } - av_free_packet( &packet ); - } // 
end while ! frameComplete - return (frameCount); -} + } else { +#if LIBAVUTIL_VERSION_CHECK(56, 23, 0, 23, 0) + Debug( 3, "Some other stream index %d, %s", packet.stream_index, av_get_media_type_string( mFormatContext->streams[packet.stream_index]->codecpar->codec_type) ); +#else + Debug( 3, "Some other stream index %d", packet.stream_index ); +#endif + } + //if ( videoStore ) { + + // the packet contents are ref counted... when queuing, we allocate another packet and reference it with that one, so we should always need to unref here, which should not affect the queued version. + zm_av_packet_unref( &packet ); + //} + } // end while ! frameComplete + return (frameCount); +} // end FfmpegCamera::CaptureAndRecord + #endif // HAVE_LIBAVFORMAT diff --git a/src/zm_ffmpeg_camera.h b/src/zm_ffmpeg_camera.h index 45b304498..d472909db 100644 --- a/src/zm_ffmpeg_camera.h +++ b/src/zm_ffmpeg_camera.h @@ -26,68 +26,81 @@ //#include "zm_utils.h" #include "zm_ffmpeg.h" #include "zm_videostore.h" +#include "zm_packetqueue.h" // // Class representing 'ffmpeg' cameras, i.e. 
those which are // accessed using ffmpeg multimedia framework // -class FfmpegCamera : public Camera -{ -protected: - std::string mPath; - std::string mMethod; - std::string mOptions; +class FfmpegCamera : public Camera { + protected: + std::string mPath; + std::string mMethod; + std::string mOptions; - int frameCount; + int frameCount; #if HAVE_LIBAVFORMAT - AVFormatContext *mFormatContext; - int mVideoStreamId; - int mAudioStreamId; - AVCodecContext *mCodecContext; - AVCodec *mCodec; - AVFrame *mRawFrame; - AVFrame *mFrame; - _AVPIXELFORMAT imagePixFormat; + AVFormatContext *mFormatContext; + int mVideoStreamId; + int mAudioStreamId; + AVCodecContext *mVideoCodecContext; + AVCodecContext *mAudioCodecContext; + AVCodec *mVideoCodec; + AVCodec *mAudioCodec; + AVFrame *mRawFrame; + AVFrame *mFrame; + _AVPIXELFORMAT imagePixFormat; - int OpenFfmpeg(); - int ReopenFfmpeg(); - int CloseFfmpeg(); - static int FfmpegInterruptCallback(void *ctx); - static void* ReopenFfmpegThreadCallback(void *ctx); - bool mIsOpening; - bool mCanCapture; - int mOpenStart; - pthread_t mReopenThread; + // Need to keep track of these because apparently the stream can start with values for pts/dts and then subsequent packets start at zero. + int64_t audio_last_pts; + int64_t audio_last_dts; + int64_t video_last_pts; + int64_t video_last_dts; + + // Used to store the incoming packet, it will get copied when queued. + // We only ever need one at a time, so instead of constantly allocating + // and freeing this structure, we will just make it a member of the object. 
+ AVPacket packet; + + int OpenFfmpeg(); + int ReopenFfmpeg(); + int CloseFfmpeg(); + static int FfmpegInterruptCallback(void *ctx); + static void* ReopenFfmpegThreadCallback(void *ctx); + bool mIsOpening; + bool mCanCapture; + int mOpenStart; + pthread_t mReopenThread; #endif // HAVE_LIBAVFORMAT - bool wasRecording; - VideoStore *videoStore; - char oldDirectory[4096]; - //AVPacket lastKeyframePkt; + VideoStore *videoStore; + char oldDirectory[4096]; + unsigned int old_event_id; + zm_packetqueue packetqueue; #if HAVE_LIBSWSCALE - struct SwsContext *mConvertContext; + struct SwsContext *mConvertContext; #endif - int64_t startTime; + int64_t startTime; -public: - FfmpegCamera( int p_id, const std::string &path, const std::string &p_method, const std::string &p_options, int p_width, int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ); - ~FfmpegCamera(); + public: + FfmpegCamera( int p_id, const std::string &path, const std::string &p_method, const std::string &p_options, int p_width, int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture, bool p_record_audio ); + ~FfmpegCamera(); - const std::string &Path() const { return( mPath ); } - const std::string &Options() const { return( mOptions ); } - const std::string &Method() const { return( mMethod ); } + const std::string &Path() const { return( mPath ); } + const std::string &Options() const { return( mOptions ); } + const std::string &Method() const { return( mMethod ); } - void Initialise(); - void Terminate(); + void Initialise(); + void Terminate(); - int PrimeCapture(); - int PreCapture(); - int Capture( Image &image ); - int CaptureAndRecord( Image &image, bool recording, char* event_directory ); - int PostCapture(); + int PrimeCapture(); + int PreCapture(); + int Capture( Image &image ); + int CaptureAndRecord( Image &image, timeval recording, char* event_directory ); + int PostCapture(); }; 
#endif // ZM_FFMPEG_CAMERA_H diff --git a/src/zm_file_camera.cpp b/src/zm_file_camera.cpp index 907a70bbc..b77628963 100644 --- a/src/zm_file_camera.cpp +++ b/src/zm_file_camera.cpp @@ -87,5 +87,5 @@ int FileCamera::Capture( Image &image ) int FileCamera::PostCapture() { - return( 0 ); + return( 0 ); } diff --git a/src/zm_file_camera.h b/src/zm_file_camera.h index 97e7d0e39..6ad911755 100644 --- a/src/zm_file_camera.h +++ b/src/zm_file_camera.h @@ -23,6 +23,7 @@ #include "zm_camera.h" #include "zm_buffer.h" #include "zm_regexp.h" +#include "zm_packetqueue.h" #include @@ -46,7 +47,7 @@ public: int PreCapture(); int Capture( Image &image ); int PostCapture(); - int CaptureAndRecord( Image &image, bool recording, char* event_directory ) {return(0);}; + int CaptureAndRecord( Image &image, timeval recording, char* event_directory ) {return(0);}; }; #endif // ZM_FILE_CAMERA_H diff --git a/src/zm_image.cpp b/src/zm_image.cpp index d6905d1f0..c6e54d20a 100644 --- a/src/zm_image.cpp +++ b/src/zm_image.cpp @@ -74,8 +74,7 @@ static deinterlace_4field_fptr_t fptr_deinterlace_4field_gray8; /* Pointer to image buffer memory copy function */ imgbufcpy_fptr_t fptr_imgbufcpy; -Image::Image() -{ +Image::Image() { if ( !initialised ) Initialise(); width = 0; @@ -91,8 +90,7 @@ Image::Image() text[0] = '\0'; } -Image::Image( const char *filename ) -{ +Image::Image( const char *filename ) { if ( !initialised ) Initialise(); width = 0; @@ -158,14 +156,14 @@ Image::~Image() { /* Should be called as part of program shutdown to free everything */ void Image::Deinitialise() { if ( initialised ) { - /* - delete[] y_table; - delete[] uv_table; - delete[] r_v_table; - delete[] g_v_table; - delete[] g_u_table; - delete[] b_u_table; - */ + /* + delete[] y_table; + delete[] uv_table; + delete[] r_v_table; + delete[] g_v_table; + delete[] g_u_table; + delete[] b_u_table; + */ initialised = false; if ( readjpg_dcinfo ) { jpeg_destroy_decompress( readjpg_dcinfo ); @@ -243,10 +241,10 @@ void 
Image::Initialise() Panic("Blend function failed self-test: Results differ from the expected results. Column %u Expected %u Got %u",i,blendexp[i],blendres[i]); } } - + fptr_delta8_rgb = &std_delta8_rgb; fptr_delta8_bgr = &std_delta8_bgr; - + /* Assign the delta functions */ if(config.cpu_extensions) { if(sseversion >= 35) { @@ -372,10 +370,10 @@ void Image::Initialise() fptr_imgbufcpy = &memcpy; Debug(4,"Image buffer copy: Using standard memcpy"); #endif - + /* Code below relocated from zm_local_camera */ Debug( 3, "Setting up static colour tables" ); - + y_table = y_table_global; uv_table = uv_table_global; r_v_table = r_v_table_global; @@ -383,63 +381,63 @@ void Image::Initialise() g_u_table = g_u_table_global; b_u_table = b_u_table_global; /* - y_table = new unsigned char[256]; - for ( int i = 0; i <= 255; i++ ) - { - unsigned char c = i; - if ( c <= 16 ) - y_table[c] = 0; - else if ( c >= 235 ) - y_table[c] = 255; - else - y_table[c] = (255*(c-16))/219; - } + y_table = new unsigned char[256]; + for ( int i = 0; i <= 255; i++ ) + { + unsigned char c = i; + if ( c <= 16 ) + y_table[c] = 0; + else if ( c >= 235 ) + y_table[c] = 255; + else + y_table[c] = (255*(c-16))/219; + } - uv_table = new signed char[256]; - for ( int i = 0; i <= 255; i++ ) - { - unsigned char c = i; - if ( c <= 16 ) - uv_table[c] = -127; - else if ( c >= 240 ) - uv_table[c] = 127; - else - uv_table[c] = (127*(c-128))/112; - } + uv_table = new signed char[256]; + for ( int i = 0; i <= 255; i++ ) + { + unsigned char c = i; + if ( c <= 16 ) + uv_table[c] = -127; + else if ( c >= 240 ) + uv_table[c] = 127; + else + uv_table[c] = (127*(c-128))/112; + } - r_v_table = new short[255]; - g_v_table = new short[255]; - g_u_table = new short[255]; - b_u_table = new short[255]; - for ( int i = 0; i < 255; i++ ) - { - r_v_table[i] = (1402*(i-128))/1000; - g_u_table[i] = (344*(i-128))/1000; - g_v_table[i] = (714*(i-128))/1000; - b_u_table[i] = (1772*(i-128))/1000; - } + r_v_table = new short[255]; + 
g_v_table = new short[255]; + g_u_table = new short[255]; + b_u_table = new short[255]; + for ( int i = 0; i < 255; i++ ) + { + r_v_table[i] = (1402*(i-128))/1000; + g_u_table[i] = (344*(i-128))/1000; + g_v_table[i] = (714*(i-128))/1000; + b_u_table[i] = (1772*(i-128))/1000; + } */ - + initialised = true; } /* Requests a writeable buffer to the image. This is safer than buffer() because this way we can guarantee that a buffer of required size exists */ uint8_t* Image::WriteBuffer(const unsigned int p_width, const unsigned int p_height, const unsigned int p_colours, const unsigned int p_subpixelorder) { unsigned int newsize; - + if(p_colours != ZM_COLOUR_GRAY8 && p_colours != ZM_COLOUR_RGB24 && p_colours != ZM_COLOUR_RGB32) { Error("WriteBuffer called with unexpected colours: %d",p_colours); return NULL; } - + if(!p_height || !p_width) { Error("WriteBuffer called with invalid width or height: %d %d",p_width,p_height); return NULL; } - + if(p_width != width || p_height != height || p_colours != colours || p_subpixelorder != subpixelorder) { newsize = (p_width * p_height) * p_colours; - + if(buffer == NULL) { AllocImgBuffer(newsize); } else { @@ -454,7 +452,7 @@ uint8_t* Image::WriteBuffer(const unsigned int p_width, const unsigned int p_hei } } } - + width = p_width; height = p_height; colours = p_colours; @@ -462,9 +460,9 @@ uint8_t* Image::WriteBuffer(const unsigned int p_width, const unsigned int p_hei pixels = height*width; size = newsize; } - + return buffer; - + } /* Assign an existing buffer to the image instead of copying from a source buffer. The goal is to reduce the amount of memory copying and increase efficiency and buffer reusing. 
*/ @@ -485,12 +483,12 @@ void Image::AssignDirect( const unsigned int p_width, const unsigned int p_heigh } unsigned int new_buffer_size = ((p_width*p_height)*p_colours); - + if(buffer_size < new_buffer_size) { Error("Attempt to directly assign buffer from an undersized buffer of size: %zu, needed %dx%d*%d colours = %zu",buffer_size, p_width, p_height, p_colours, new_buffer_size ); return; } - + if(holdbuffer && buffer) { if(new_buffer_size > allocation) { Error("Held buffer is undersized for assigned buffer"); @@ -502,55 +500,55 @@ void Image::AssignDirect( const unsigned int p_width, const unsigned int p_heigh subpixelorder = p_subpixelorder; pixels = height*width; size = new_buffer_size; // was pixels*colours, but we already calculated it above as new_buffer_size - + /* Copy into the held buffer */ if(new_buffer != buffer) (*fptr_imgbufcpy)(buffer, new_buffer, size); - + /* Free the new buffer */ DumpBuffer(new_buffer, p_buffertype); } } else { /* Free an existing buffer if any */ DumpImgBuffer(); - + width = p_width; height = p_height; colours = p_colours; subpixelorder = p_subpixelorder; pixels = height*width; size = new_buffer_size; // was pixels*colours, but we already calculated it above as new_buffer_size - + allocation = buffer_size; buffertype = p_buffertype; buffer = new_buffer; } - + } void Image::Assign(const unsigned int p_width, const unsigned int p_height, const unsigned int p_colours, const unsigned int p_subpixelorder, const uint8_t* new_buffer, const size_t buffer_size) { unsigned int new_size = (p_width * p_height) * p_colours; - + if(new_buffer == NULL) { Error("Attempt to assign buffer from a NULL pointer"); return; } - + if(buffer_size < new_size) { Error("Attempt to assign buffer from an undersized buffer of size: %zu",buffer_size); return; } - + if(!p_height || !p_width) { Error("Attempt to assign buffer with invalid width or height: %d %d",p_width,p_height); return; } - + if(p_colours != ZM_COLOUR_GRAY8 && p_colours != ZM_COLOUR_RGB24 && 
p_colours != ZM_COLOUR_RGB32) { Error("Attempt to assign buffer with unexpected colours per pixel: %d",p_colours); return; } - + if ( !buffer || p_width != width || p_height != height || p_colours != colours || p_subpixelorder != subpixelorder) { if (holdbuffer && buffer) { @@ -564,7 +562,7 @@ void Image::Assign(const unsigned int p_width, const unsigned int p_height, cons AllocImgBuffer(new_size); } } - + width = p_width; height = p_height; pixels = width*height; @@ -572,25 +570,25 @@ void Image::Assign(const unsigned int p_width, const unsigned int p_height, cons subpixelorder = p_subpixelorder; size = new_size; } - + if(new_buffer != buffer) (*fptr_imgbufcpy)(buffer, new_buffer, size); - + } void Image::Assign( const Image &image ) { unsigned int new_size = (image.width * image.height) * image.colours; - + if(image.buffer == NULL) { Error("Attempt to assign image with an empty buffer"); return; } - + if(image.colours != ZM_COLOUR_GRAY8 && image.colours != ZM_COLOUR_RGB24 && image.colours != ZM_COLOUR_RGB32) { Error("Attempt to assign image with unexpected colours per pixel: %d",image.colours); return; } - + if ( !buffer || image.width != width || image.height != height || image.colours != colours || image.subpixelorder != subpixelorder) { if (holdbuffer && buffer) { @@ -604,7 +602,7 @@ void Image::Assign( const Image &image ) { AllocImgBuffer(new_size); } } - + width = image.width; height = image.height; pixels = width*height; @@ -612,7 +610,7 @@ void Image::Assign( const Image &image ) { subpixelorder = image.subpixelorder; size = new_size; } - + if(image.buffer != buffer) (*fptr_imgbufcpy)(buffer, image.buffer, size); } @@ -623,14 +621,14 @@ Image *Image::HighlightEdges( Rgb colour, unsigned int p_colours, unsigned int p { Panic( "Attempt to highlight image edges when colours = %d", colours ); } - + /* Convert the colour's RGBA subpixel order into the image's subpixel order */ colour = rgb_convert(colour,p_subpixelorder); - + /* Create a new image of the 
target format */ Image *high_image = new Image( width, height, p_colours, p_subpixelorder ); uint8_t* high_buff = high_image->WriteBuffer(width, height, p_colours, p_subpixelorder); - + /* Set image to all black */ high_image->Clear(); @@ -638,7 +636,7 @@ Image *Image::HighlightEdges( Rgb colour, unsigned int p_colours, unsigned int p unsigned int lo_y = limits?limits->Lo().Y():0; unsigned int hi_x = limits?limits->Hi().X():width-1; unsigned int hi_y = limits?limits->Hi().Y():height-1; - + if ( p_colours == ZM_COLOUR_GRAY8 ) { for ( unsigned int y = lo_y; y <= hi_y; y++ ) @@ -710,7 +708,7 @@ Image *Image::HighlightEdges( Rgb colour, unsigned int p_colours, unsigned int p } } } - + return( high_image ); } @@ -806,7 +804,7 @@ bool Image::ReadJpeg( const char *filename, unsigned int p_colours, unsigned int fclose( infile ); return( false ); } - + /* Check if the image has at least one huffman table defined. If not, use the standard ones */ /* This is required for the MJPEG capture palette of USB devices */ if(cinfo->dc_huff_tbl_ptrs[0] == NULL) { @@ -820,67 +818,67 @@ bool Image::ReadJpeg( const char *filename, unsigned int p_colours, unsigned int { Debug(9,"Image dimensions differ. 
Old: %ux%u New: %ux%u",width,height,new_width,new_height); } - + switch(p_colours) { case ZM_COLOUR_GRAY8: - { - cinfo->out_color_space = JCS_GRAYSCALE; - new_colours = ZM_COLOUR_GRAY8; - new_subpixelorder = ZM_SUBPIX_ORDER_NONE; - break; - } + { + cinfo->out_color_space = JCS_GRAYSCALE; + new_colours = ZM_COLOUR_GRAY8; + new_subpixelorder = ZM_SUBPIX_ORDER_NONE; + break; + } case ZM_COLOUR_RGB32: - { + { #ifdef JCS_EXTENSIONS - new_colours = ZM_COLOUR_RGB32; - if(p_subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - cinfo->out_color_space = JCS_EXT_BGRX; - new_subpixelorder = ZM_SUBPIX_ORDER_BGRA; - } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - cinfo->out_color_space = JCS_EXT_XRGB; - new_subpixelorder = ZM_SUBPIX_ORDER_ARGB; - } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - cinfo->out_color_space = JCS_EXT_XBGR; - new_subpixelorder = ZM_SUBPIX_ORDER_ABGR; - } else { - /* Assume RGBA */ - cinfo->out_color_space = JCS_EXT_RGBX; - new_subpixelorder = ZM_SUBPIX_ORDER_RGBA; - } - break; + new_colours = ZM_COLOUR_RGB32; + if(p_subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + cinfo->out_color_space = JCS_EXT_BGRX; + new_subpixelorder = ZM_SUBPIX_ORDER_BGRA; + } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ARGB) { + cinfo->out_color_space = JCS_EXT_XRGB; + new_subpixelorder = ZM_SUBPIX_ORDER_ARGB; + } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + cinfo->out_color_space = JCS_EXT_XBGR; + new_subpixelorder = ZM_SUBPIX_ORDER_ABGR; + } else { + /* Assume RGBA */ + cinfo->out_color_space = JCS_EXT_RGBX; + new_subpixelorder = ZM_SUBPIX_ORDER_RGBA; + } + break; #else - Warning("libjpeg-turbo is required for reading a JPEG directly into a RGB32 buffer, reading into a RGB24 buffer instead."); + Warning("libjpeg-turbo is required for reading a JPEG directly into a RGB32 buffer, reading into a RGB24 buffer instead."); #endif - } + } case ZM_COLOUR_RGB24: default: - { - new_colours = ZM_COLOUR_RGB24; - if(p_subpixelorder == ZM_SUBPIX_ORDER_BGR) { + { + new_colours = 
ZM_COLOUR_RGB24; + if(p_subpixelorder == ZM_SUBPIX_ORDER_BGR) { #ifdef JCS_EXTENSIONS - cinfo->out_color_space = JCS_EXT_BGR; - new_subpixelorder = ZM_SUBPIX_ORDER_BGR; + cinfo->out_color_space = JCS_EXT_BGR; + new_subpixelorder = ZM_SUBPIX_ORDER_BGR; #else - Warning("libjpeg-turbo is required for reading a JPEG directly into a BGR24 buffer, reading into a RGB24 buffer instead."); - cinfo->out_color_space = JCS_RGB; - new_subpixelorder = ZM_SUBPIX_ORDER_RGB; + Warning("libjpeg-turbo is required for reading a JPEG directly into a BGR24 buffer, reading into a RGB24 buffer instead."); + cinfo->out_color_space = JCS_RGB; + new_subpixelorder = ZM_SUBPIX_ORDER_RGB; #endif - } else { - /* Assume RGB */ -/* + } else { + /* Assume RGB */ + /* #ifdef JCS_EXTENSIONS - cinfo->out_color_space = JCS_EXT_RGB; +cinfo->out_color_space = JCS_EXT_RGB; #else - cinfo->out_color_space = JCS_RGB; +cinfo->out_color_space = JCS_RGB; #endif -*/ - cinfo->out_color_space = JCS_RGB; - new_subpixelorder = ZM_SUBPIX_ORDER_RGB; - } - break; - } + */ + cinfo->out_color_space = JCS_RGB; + new_subpixelorder = ZM_SUBPIX_ORDER_RGB; + } + break; + } } - + if(WriteBuffer(new_width, new_height, new_colours, new_subpixelorder) == NULL) { Error("Failed requesting writeable buffer for reading JPEG image."); jpeg_abort_decompress( cinfo ); @@ -952,97 +950,97 @@ bool Image::WriteJpeg( const char *filename, int quality_override, struct timeva cinfo->image_width = width; /* image width and height, in pixels */ cinfo->image_height = height; - + switch(colours) { case ZM_COLOUR_GRAY8: - { - cinfo->input_components = 1; - cinfo->in_color_space = JCS_GRAYSCALE; - break; - } + { + cinfo->input_components = 1; + cinfo->in_color_space = JCS_GRAYSCALE; + break; + } case ZM_COLOUR_RGB32: - { + { #ifdef JCS_EXTENSIONS - cinfo->input_components = 4; - if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - cinfo->in_color_space = JCS_EXT_BGRX; - } else if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - cinfo->in_color_space = 
JCS_EXT_XRGB; - } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - cinfo->in_color_space = JCS_EXT_XBGR; - } else { - /* Assume RGBA */ - cinfo->in_color_space = JCS_EXT_RGBX; - } + cinfo->input_components = 4; + if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + cinfo->in_color_space = JCS_EXT_BGRX; + } else if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { + cinfo->in_color_space = JCS_EXT_XRGB; + } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + cinfo->in_color_space = JCS_EXT_XBGR; + } else { + /* Assume RGBA */ + cinfo->in_color_space = JCS_EXT_RGBX; + } #else - Error("libjpeg-turbo is required for JPEG encoding directly from RGB32 source"); - jpeg_abort_compress( cinfo ); - fclose(outfile); - return(false); + Error("libjpeg-turbo is required for JPEG encoding directly from RGB32 source"); + jpeg_abort_compress( cinfo ); + fclose(outfile); + return(false); #endif - break; - } + break; + } case ZM_COLOUR_RGB24: default: - { - cinfo->input_components = 3; - if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { + { + cinfo->input_components = 3; + if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { #ifdef JCS_EXTENSIONS - cinfo->in_color_space = JCS_EXT_BGR; + cinfo->in_color_space = JCS_EXT_BGR; #else - Error("libjpeg-turbo is required for JPEG encoding directly from BGR24 source"); - jpeg_abort_compress( cinfo ); - fclose(outfile); - return(false); + Error("libjpeg-turbo is required for JPEG encoding directly from BGR24 source"); + jpeg_abort_compress( cinfo ); + fclose(outfile); + return(false); #endif - } else { - /* Assume RGB */ -/* + } else { + /* Assume RGB */ + /* #ifdef JCS_EXTENSIONS - cinfo->out_color_space = JCS_EXT_RGB; +cinfo->out_color_space = JCS_EXT_RGB; #else - cinfo->out_color_space = JCS_RGB; +cinfo->out_color_space = JCS_RGB; #endif -*/ - cinfo->in_color_space = JCS_RGB; - } - break; - } + */ + cinfo->in_color_space = JCS_RGB; + } + break; + } } - + jpeg_set_defaults( cinfo ); jpeg_set_quality( cinfo, quality, FALSE ); cinfo->dct_method = JDCT_FASTEST; 
jpeg_start_compress( cinfo, TRUE ); - if ( config.add_jpeg_comments && text[0] ) - { + if ( config.add_jpeg_comments && text[0] ) { jpeg_write_marker( cinfo, JPEG_COM, (const JOCTET *)text, strlen(text) ); } - // If we have a non-zero time (meaning a parameter was passed in), then form a simple exif segment with that time as DateTimeOriginal and SubsecTimeOriginal - // No timestamp just leave off the exif section. - if(timestamp.tv_sec) - { - #define EXIFTIMES_MS_OFFSET 0x36 // three decimal digits for milliseconds - #define EXIFTIMES_MS_LEN 0x03 - #define EXIFTIMES_OFFSET 0x3E // 19 characters format '2015:07:21 13:14:45' not including quotes - #define EXIFTIMES_LEN 0x13 // = 19 - #define EXIF_CODE 0xE1 + // If we have a non-zero time (meaning a parameter was passed in), then form a simple exif segment with that time as DateTimeOriginal and SubsecTimeOriginal + // No timestamp just leave off the exif section. + if(timestamp.tv_sec) + { +#define EXIFTIMES_MS_OFFSET 0x36 // three decimal digits for milliseconds +#define EXIFTIMES_MS_LEN 0x03 +#define EXIFTIMES_OFFSET 0x3E // 19 characters format '2015:07:21 13:14:45' not including quotes +#define EXIFTIMES_LEN 0x13 // = 19 +#define EXIF_CODE 0xE1 - char timebuf[64], msbuf[64]; - strftime(timebuf, sizeof timebuf, "%Y:%m:%d %H:%M:%S", localtime(&(timestamp.tv_sec))); - snprintf(msbuf, sizeof msbuf, "%06d",(int)(timestamp.tv_usec)); // we only use milliseconds because that's all defined in exif, but this is the whole microseconds because we have it - unsigned char exiftimes[82] = { - 0x45, 0x78, 0x69, 0x66, 0x00, 0x00, 0x49, 0x49, 0x2A, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, - 0x69, 0x87, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x1A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x02, 0x00, 0x03, 0x90, 0x02, 0x00, 0x14, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x91, 0x92, - 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0xff, 0x00 }; - memcpy(&exiftimes[EXIFTIMES_OFFSET], timebuf,EXIFTIMES_LEN); - memcpy(&exiftimes[EXIFTIMES_MS_OFFSET], msbuf ,EXIFTIMES_MS_LEN); - jpeg_write_marker (cinfo, EXIF_CODE, (const JOCTET *)exiftimes, sizeof(exiftimes) ); - } + // This is a lot of stuff to allocate on the stack. Recommend char *timebuf[64]; + char timebuf[64], msbuf[64]; + strftime(timebuf, sizeof timebuf, "%Y:%m:%d %H:%M:%S", localtime(&(timestamp.tv_sec))); + snprintf(msbuf, sizeof msbuf, "%06d",(int)(timestamp.tv_usec)); // we only use milliseconds because that's all defined in exif, but this is the whole microseconds because we have it + unsigned char exiftimes[82] = { + 0x45, 0x78, 0x69, 0x66, 0x00, 0x00, 0x49, 0x49, 0x2A, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, + 0x69, 0x87, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x1A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x03, 0x90, 0x02, 0x00, 0x14, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x91, 0x92, + 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0x00 }; + memcpy(&exiftimes[EXIFTIMES_OFFSET], timebuf,EXIFTIMES_LEN); + memcpy(&exiftimes[EXIFTIMES_MS_OFFSET], msbuf, EXIFTIMES_MS_LEN); + jpeg_write_marker( cinfo, EXIF_CODE, (const JOCTET *)exiftimes, sizeof(exiftimes) ); + } JSAMPROW row_pointer; /* pointer to a single row */ int row_stride = cinfo->image_width * colours; /* physical row width in buffer */ @@ -1089,7 +1087,7 @@ bool Image::DecodeJpeg( const JOCTET *inbuffer, int inbuffer_size, unsigned int jpeg_abort_decompress( cinfo ); return( false ); } - + /* Check if the image has at least one huffman table defined. 
If not, use the standard ones */ /* This is required for the MJPEG capture palette of USB devices */ if(cinfo->dc_huff_tbl_ptrs[0] == NULL) { @@ -1103,67 +1101,67 @@ bool Image::DecodeJpeg( const JOCTET *inbuffer, int inbuffer_size, unsigned int { Debug(9,"Image dimensions differ. Old: %ux%u New: %ux%u",width,height,new_width,new_height); } - + switch(p_colours) { case ZM_COLOUR_GRAY8: - { - cinfo->out_color_space = JCS_GRAYSCALE; - new_colours = ZM_COLOUR_GRAY8; - new_subpixelorder = ZM_SUBPIX_ORDER_NONE; - break; - } + { + cinfo->out_color_space = JCS_GRAYSCALE; + new_colours = ZM_COLOUR_GRAY8; + new_subpixelorder = ZM_SUBPIX_ORDER_NONE; + break; + } case ZM_COLOUR_RGB32: - { + { #ifdef JCS_EXTENSIONS - new_colours = ZM_COLOUR_RGB32; - if(p_subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - cinfo->out_color_space = JCS_EXT_BGRX; - new_subpixelorder = ZM_SUBPIX_ORDER_BGRA; - } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - cinfo->out_color_space = JCS_EXT_XRGB; - new_subpixelorder = ZM_SUBPIX_ORDER_ARGB; - } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - cinfo->out_color_space = JCS_EXT_XBGR; - new_subpixelorder = ZM_SUBPIX_ORDER_ABGR; - } else { - /* Assume RGBA */ - cinfo->out_color_space = JCS_EXT_RGBX; - new_subpixelorder = ZM_SUBPIX_ORDER_RGBA; - } - break; + new_colours = ZM_COLOUR_RGB32; + if(p_subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + cinfo->out_color_space = JCS_EXT_BGRX; + new_subpixelorder = ZM_SUBPIX_ORDER_BGRA; + } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ARGB) { + cinfo->out_color_space = JCS_EXT_XRGB; + new_subpixelorder = ZM_SUBPIX_ORDER_ARGB; + } else if(p_subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + cinfo->out_color_space = JCS_EXT_XBGR; + new_subpixelorder = ZM_SUBPIX_ORDER_ABGR; + } else { + /* Assume RGBA */ + cinfo->out_color_space = JCS_EXT_RGBX; + new_subpixelorder = ZM_SUBPIX_ORDER_RGBA; + } + break; #else - Warning("libjpeg-turbo is required for reading a JPEG directly into a RGB32 buffer, reading into a RGB24 buffer instead."); + 
Warning("libjpeg-turbo is required for reading a JPEG directly into a RGB32 buffer, reading into a RGB24 buffer instead."); #endif - } + } case ZM_COLOUR_RGB24: default: - { - new_colours = ZM_COLOUR_RGB24; - if(p_subpixelorder == ZM_SUBPIX_ORDER_BGR) { + { + new_colours = ZM_COLOUR_RGB24; + if(p_subpixelorder == ZM_SUBPIX_ORDER_BGR) { #ifdef JCS_EXTENSIONS - cinfo->out_color_space = JCS_EXT_BGR; - new_subpixelorder = ZM_SUBPIX_ORDER_BGR; + cinfo->out_color_space = JCS_EXT_BGR; + new_subpixelorder = ZM_SUBPIX_ORDER_BGR; #else - Warning("libjpeg-turbo is required for reading a JPEG directly into a BGR24 buffer, reading into a RGB24 buffer instead."); - cinfo->out_color_space = JCS_RGB; - new_subpixelorder = ZM_SUBPIX_ORDER_RGB; + Warning("libjpeg-turbo is required for reading a JPEG directly into a BGR24 buffer, reading into a RGB24 buffer instead."); + cinfo->out_color_space = JCS_RGB; + new_subpixelorder = ZM_SUBPIX_ORDER_RGB; #endif - } else { - /* Assume RGB */ -/* + } else { + /* Assume RGB */ + /* #ifdef JCS_EXTENSIONS - cinfo->out_color_space = JCS_EXT_RGB; +cinfo->out_color_space = JCS_EXT_RGB; #else - cinfo->out_color_space = JCS_RGB; +cinfo->out_color_space = JCS_RGB; #endif -*/ - cinfo->out_color_space = JCS_RGB; - new_subpixelorder = ZM_SUBPIX_ORDER_RGB; - } - break; - } + */ + cinfo->out_color_space = JCS_RGB; + new_subpixelorder = ZM_SUBPIX_ORDER_RGB; + } + break; + } } - + if(WriteBuffer(new_width, new_height, new_colours, new_subpixelorder) == NULL) { Error("Failed requesting writeable buffer for reading JPEG image."); jpeg_abort_decompress( cinfo ); @@ -1214,59 +1212,59 @@ bool Image::EncodeJpeg( JOCTET *outbuffer, int *outbuffer_size, int quality_over switch(colours) { case ZM_COLOUR_GRAY8: - { - cinfo->input_components = 1; - cinfo->in_color_space = JCS_GRAYSCALE; - break; - } + { + cinfo->input_components = 1; + cinfo->in_color_space = JCS_GRAYSCALE; + break; + } case ZM_COLOUR_RGB32: - { + { #ifdef JCS_EXTENSIONS - cinfo->input_components = 4; - 
if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - cinfo->in_color_space = JCS_EXT_BGRX; - } else if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - cinfo->in_color_space = JCS_EXT_XRGB; - } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - cinfo->in_color_space = JCS_EXT_XBGR; - } else { - /* Assume RGBA */ - cinfo->in_color_space = JCS_EXT_RGBX; - } + cinfo->input_components = 4; + if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + cinfo->in_color_space = JCS_EXT_BGRX; + } else if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { + cinfo->in_color_space = JCS_EXT_XRGB; + } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + cinfo->in_color_space = JCS_EXT_XBGR; + } else { + /* Assume RGBA */ + cinfo->in_color_space = JCS_EXT_RGBX; + } #else - Error("libjpeg-turbo is required for JPEG encoding directly from RGB32 source"); - jpeg_abort_compress( cinfo ); - return(false); + Error("libjpeg-turbo is required for JPEG encoding directly from RGB32 source"); + jpeg_abort_compress( cinfo ); + return(false); #endif - break; - } + break; + } case ZM_COLOUR_RGB24: default: - { - cinfo->input_components = 3; - if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { + { + cinfo->input_components = 3; + if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { #ifdef JCS_EXTENSIONS - cinfo->in_color_space = JCS_EXT_BGR; + cinfo->in_color_space = JCS_EXT_BGR; #else - Error("libjpeg-turbo is required for JPEG encoding directly from BGR24 source"); - jpeg_abort_compress( cinfo ); - return(false); + Error("libjpeg-turbo is required for JPEG encoding directly from BGR24 source"); + jpeg_abort_compress( cinfo ); + return(false); #endif - } else { - /* Assume RGB */ -/* + } else { + /* Assume RGB */ + /* #ifdef JCS_EXTENSIONS - cinfo->out_color_space = JCS_EXT_RGB; +cinfo->out_color_space = JCS_EXT_RGB; #else - cinfo->out_color_space = JCS_RGB; +cinfo->out_color_space = JCS_RGB; #endif -*/ - cinfo->in_color_space = JCS_RGB; - } - break; - } + */ + cinfo->in_color_space = JCS_RGB; + } + break; + } } - + jpeg_set_defaults( cinfo ); 
jpeg_set_quality( cinfo, quality, FALSE ); cinfo->dct_method = JDCT_FASTEST; @@ -1339,7 +1337,7 @@ bool Image::Crop( unsigned int lo_x, unsigned int lo_y, unsigned int hi_x, unsig unsigned int new_size = new_width*new_height*colours; uint8_t *new_buffer = AllocBuffer(new_size); - + unsigned int new_stride = new_width*colours; for ( unsigned int y = lo_y, ny = 0; y <= hi_y; y++, ny++ ) { @@ -1366,17 +1364,17 @@ void Image::Overlay( const Image &image ) { Panic( "Attempt to overlay different sized images, expected %dx%d, got %dx%d", width, height, image.width, image.height ); } - + if( colours == image.colours && subpixelorder != image.subpixelorder ) { Warning("Attempt to overlay images of same format but with different subpixel order."); } - + /* Grayscale ontop of grayscale - complete */ if ( colours == ZM_COLOUR_GRAY8 && image.colours == ZM_COLOUR_GRAY8 ) { const uint8_t* const max_ptr = buffer+size; const uint8_t* psrc = image.buffer; uint8_t* pdest = buffer; - + while( pdest < max_ptr ) { if ( *psrc ) @@ -1386,15 +1384,15 @@ void Image::Overlay( const Image &image ) pdest++; psrc++; } - - /* RGB24 ontop of grayscale - convert to same format first - complete */ + + /* RGB24 ontop of grayscale - convert to same format first - complete */ } else if ( colours == ZM_COLOUR_GRAY8 && image.colours == ZM_COLOUR_RGB24 ) { Colourise(image.colours, image.subpixelorder); - + const uint8_t* const max_ptr = buffer+size; const uint8_t* psrc = image.buffer; uint8_t* pdest = buffer; - + while( pdest < max_ptr ) { if ( RED_PTR_RGBA(psrc) || GREEN_PTR_RGBA(psrc) || BLUE_PTR_RGBA(psrc) ) @@ -1406,15 +1404,15 @@ void Image::Overlay( const Image &image ) pdest += 3; psrc += 3; } - - /* RGB32 ontop of grayscale - convert to same format first - complete */ + + /* RGB32 ontop of grayscale - convert to same format first - complete */ } else if( colours == ZM_COLOUR_GRAY8 && image.colours == ZM_COLOUR_RGB32 ) { Colourise(image.colours, image.subpixelorder); - + const Rgb* const max_ptr = 
(Rgb*)(buffer+size); const Rgb* prsrc = (Rgb*)image.buffer; Rgb* prdest = (Rgb*)buffer; - + if(subpixelorder == ZM_SUBPIX_ORDER_RGBA || subpixelorder == ZM_SUBPIX_ORDER_BGRA) { /* RGB\BGR\RGBA\BGRA subpixel order - Alpha byte is last */ while (prdest < max_ptr) { @@ -1436,13 +1434,13 @@ void Image::Overlay( const Image &image ) prsrc++; } } - - /* Grayscale ontop of RGB24 - complete */ + + /* Grayscale ontop of RGB24 - complete */ } else if ( colours == ZM_COLOUR_RGB24 && image.colours == ZM_COLOUR_GRAY8 ) { const uint8_t* const max_ptr = buffer+size; const uint8_t* psrc = image.buffer; uint8_t* pdest = buffer; - + while( pdest < max_ptr ) { if ( *psrc ) @@ -1452,13 +1450,13 @@ void Image::Overlay( const Image &image ) pdest += 3; psrc++; } - - /* RGB24 ontop of RGB24 - not complete. need to take care of different subpixel orders */ + + /* RGB24 ontop of RGB24 - not complete. need to take care of different subpixel orders */ } else if ( colours == ZM_COLOUR_RGB24 && image.colours == ZM_COLOUR_RGB24 ) { const uint8_t* const max_ptr = buffer+size; const uint8_t* psrc = image.buffer; uint8_t* pdest = buffer; - + while( pdest < max_ptr ) { if ( RED_PTR_RGBA(psrc) || GREEN_PTR_RGBA(psrc) || BLUE_PTR_RGBA(psrc) ) @@ -1470,17 +1468,17 @@ void Image::Overlay( const Image &image ) pdest += 3; psrc += 3; } - - /* RGB32 ontop of RGB24 - TO BE DONE */ + + /* RGB32 ontop of RGB24 - TO BE DONE */ } else if ( colours == ZM_COLOUR_RGB24 && image.colours == ZM_COLOUR_RGB32 ) { Error("Overlay of RGB32 ontop of RGB24 is not supported."); - - /* Grayscale ontop of RGB32 - complete */ + + /* Grayscale ontop of RGB32 - complete */ } else if ( colours == ZM_COLOUR_RGB32 && image.colours == ZM_COLOUR_GRAY8 ) { const Rgb* const max_ptr = (Rgb*)(buffer+size); Rgb* prdest = (Rgb*)buffer; const uint8_t* psrc = image.buffer; - + if(subpixelorder == ZM_SUBPIX_ORDER_RGBA || subpixelorder == ZM_SUBPIX_ORDER_BGRA) { /* RGBA\BGRA subpixel order - Alpha byte is last */ while (prdest < max_ptr) { @@ 
-1502,17 +1500,17 @@ void Image::Overlay( const Image &image ) psrc++; } } - - /* RGB24 ontop of RGB32 - TO BE DONE */ + + /* RGB24 ontop of RGB32 - TO BE DONE */ } else if ( colours == ZM_COLOUR_RGB32 && image.colours == ZM_COLOUR_RGB24 ) { Error("Overlay of RGB24 ontop of RGB32 is not supported."); - - /* RGB32 ontop of RGB32 - not complete. need to take care of different subpixel orders */ + + /* RGB32 ontop of RGB32 - not complete. need to take care of different subpixel orders */ } else if ( colours == ZM_COLOUR_RGB32 && image.colours == ZM_COLOUR_RGB32 ) { const Rgb* const max_ptr = (Rgb*)(buffer+size); Rgb* prdest = (Rgb*)buffer; const Rgb* prsrc = (Rgb*)image.buffer; - + if(image.subpixelorder == ZM_SUBPIX_ORDER_RGBA || image.subpixelorder == ZM_SUBPIX_ORDER_BGRA) { /* RGB\BGR\RGBA\BGRA subpixel order - Alpha byte is last */ while (prdest < max_ptr) { @@ -1535,7 +1533,7 @@ void Image::Overlay( const Image &image ) } } } - + } /* RGB32 compatible: complete */ @@ -1600,7 +1598,7 @@ void Image::Overlay( const Image &image, unsigned int x, unsigned int y ) } else { Error("Overlay called with unexpected colours: %d", colours); } - + } void Image::Blend( const Image &image, int transparency ) @@ -1611,33 +1609,33 @@ void Image::Blend( const Image &image, int transparency ) unsigned long milpixels; #endif uint8_t* new_buffer; - + if ( !(width == image.width && height == image.height && colours == image.colours && subpixelorder == image.subpixelorder) ) { Panic( "Attempt to blend different sized images, expected %dx%dx%d %d, got %dx%dx%d %d", width, height, colours, subpixelorder, image.width, image.height, image.colours, image.subpixelorder ); } - + if(transparency <= 0) return; - + new_buffer = AllocBuffer(size); - + #ifdef ZM_IMAGE_PROFILING clock_gettime(CLOCK_THREAD_CPUTIME_ID,&start); #endif - + /* Do the blending */ (*fptr_blend)(buffer, image.buffer, new_buffer, size, transparency); - + #ifdef ZM_IMAGE_PROFILING clock_gettime(CLOCK_THREAD_CPUTIME_ID,&end); 
timespec_diff(&start,&end,&diff); - + executetime = (1000000000ull * diff.tv_sec) + diff.tv_nsec; milpixels = (unsigned long)((long double)size)/((((long double)executetime)/1000)); Debug(5, "Blend: %u colours blended in %llu nanoseconds, %lu million colours/s\n",size,executetime,milpixels); #endif - + AssignDirect( width, height, colours, subpixelorder, new_buffer, size, ZM_BUFTYPE_ZM); } @@ -1729,6 +1727,8 @@ Image *Image::Highlight( unsigned int n_images, Image *images[], const Rgb thres unsigned int size = result->size; for ( unsigned int c = 0; c < colours; c++ ) { + unsigned int ref_colour_rgb = RGB_VAL(ref_colour,c); + for ( unsigned int i = 0; i < size; i++ ) { unsigned int count = 0; @@ -1737,7 +1737,7 @@ Image *Image::Highlight( unsigned int n_images, Image *images[], const Rgb thres { uint8_t *psrc = images[j]->buffer+c; - unsigned int diff = ((*psrc)-RGB_VAL(ref_colour,c)) > 0 ? (*psrc)-RGB_VAL(ref_colour,c) : RGB_VAL(ref_colour,c) - (*psrc); + unsigned int diff = ((*psrc)-ref_colour_rgb) > 0 ? 
(*psrc)-ref_colour_rgb : ref_colour_rgb - (*psrc); if (diff >= RGB_VAL(threshold,c)) { @@ -1760,63 +1760,63 @@ void Image::Delta( const Image &image, Image* targetimage) const unsigned long long executetime; unsigned long milpixels; #endif - + if ( !(width == image.width && height == image.height && colours == image.colours && subpixelorder == image.subpixelorder) ) { Panic( "Attempt to get delta of different sized images, expected %dx%dx%d %d, got %dx%dx%d %d", width, height, colours, subpixelorder, image.width, image.height, image.colours, image.subpixelorder); } - + uint8_t *pdiff = targetimage->WriteBuffer(width, height, ZM_COLOUR_GRAY8, ZM_SUBPIX_ORDER_NONE); - + if(pdiff == NULL) { Panic("Failed requesting writeable buffer for storing the delta image"); } - + #ifdef ZM_IMAGE_PROFILING clock_gettime(CLOCK_THREAD_CPUTIME_ID,&start); #endif - + switch(colours) { case ZM_COLOUR_RGB24: - { - if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { - /* BGR subpixel order */ - (*fptr_delta8_bgr)(buffer, image.buffer, pdiff, pixels); - } else { - /* Assume RGB subpixel order */ - (*fptr_delta8_rgb)(buffer, image.buffer, pdiff, pixels); - } - break; - } + { + if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { + /* BGR subpixel order */ + (*fptr_delta8_bgr)(buffer, image.buffer, pdiff, pixels); + } else { + /* Assume RGB subpixel order */ + (*fptr_delta8_rgb)(buffer, image.buffer, pdiff, pixels); + } + break; + } case ZM_COLOUR_RGB32: - { - if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - /* ARGB subpixel order */ - (*fptr_delta8_argb)(buffer, image.buffer, pdiff, pixels); - } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - /* ABGR subpixel order */ - (*fptr_delta8_abgr)(buffer, image.buffer, pdiff, pixels); - } else if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - /* BGRA subpixel order */ - (*fptr_delta8_bgra)(buffer, image.buffer, pdiff, pixels); - } else { - /* Assume RGBA subpixel order */ - (*fptr_delta8_rgba)(buffer, image.buffer, pdiff, pixels); - } - break; - } + { + if(subpixelorder 
== ZM_SUBPIX_ORDER_ARGB) { + /* ARGB subpixel order */ + (*fptr_delta8_argb)(buffer, image.buffer, pdiff, pixels); + } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + /* ABGR subpixel order */ + (*fptr_delta8_abgr)(buffer, image.buffer, pdiff, pixels); + } else if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + /* BGRA subpixel order */ + (*fptr_delta8_bgra)(buffer, image.buffer, pdiff, pixels); + } else { + /* Assume RGBA subpixel order */ + (*fptr_delta8_rgba)(buffer, image.buffer, pdiff, pixels); + } + break; + } case ZM_COLOUR_GRAY8: - (*fptr_delta8_gray8)(buffer, image.buffer, pdiff, pixels); - break; + (*fptr_delta8_gray8)(buffer, image.buffer, pdiff, pixels); + break; default: - Panic("Delta called with unexpected colours: %d",colours); - break; + Panic("Delta called with unexpected colours: %d",colours); + break; } - + #ifdef ZM_IMAGE_PROFILING clock_gettime(CLOCK_THREAD_CPUTIME_ID,&end); timespec_diff(&start,&end,&diff); - + executetime = (1000000000ull * diff.tv_sec) + diff.tv_nsec; milpixels = (unsigned long)((long double)pixels)/((((long double)executetime)/1000)); Debug(5, "Delta: %u delta pixels generated in %llu nanoseconds, %lu million pixels/s\n",pixels,executetime,milpixels); @@ -1887,7 +1887,7 @@ void Image::MaskPrivacy( const unsigned char *p_bitmask, const Rgb pixel_colour } } else if ( colours == ZM_COLOUR_RGB32 ) - { + { for ( unsigned int x = 0; x < width; x++, ptr += colours ) { Rgb *temp_ptr = (Rgb*)ptr; @@ -1895,10 +1895,10 @@ void Image::MaskPrivacy( const unsigned char *p_bitmask, const Rgb pixel_colour *temp_ptr = pixel_rgb_col; i++; } - } else { - Panic("MaskPrivacy called with unexpected colours: %d", colours); - return; - } + } else { + Panic("MaskPrivacy called with unexpected colours: %d", colours); + return; + } } } @@ -1920,7 +1920,7 @@ void Image::Annotate( const char *p_text, const Coord &coord, const unsigned int const uint8_t fg_bw_col = fg_colour & 0xff; const Rgb fg_rgb_col = rgb_convert(fg_colour,subpixelorder); const bool 
fg_trans = (fg_colour == RGB_TRANSPARENT); - + const uint8_t bg_r_col = RED_VAL_RGBA(bg_colour); const uint8_t bg_g_col = GREEN_VAL_RGBA(bg_colour); const uint8_t bg_b_col = BLUE_VAL_RGBA(bg_colour); @@ -2028,7 +2028,7 @@ void Image::Annotate( const char *p_text, const Coord &coord, const unsigned int } } else if ( colours == ZM_COLOUR_RGB32 ) - { + { unsigned int wc = width * colours; uint8_t *ptr = &buffer[((lo_line_y*width)+lo_line_x)<<2]; @@ -2048,22 +2048,22 @@ void Image::Annotate( const char *p_text, const Coord &coord, const unsigned int { if ( !fg_trans ) { - *temp_ptr = fg_rgb_col; + *temp_ptr = fg_rgb_col; } } else if ( !bg_trans ) { - *temp_ptr = bg_rgb_col; + *temp_ptr = bg_rgb_col; } } } } - - } else { - Panic("Annotate called with unexpected colours: %d",colours); - return; - } - + + } else { + Panic("Annotate called with unexpected colours: %d",colours); + return; + } + index += line_len; while ( text[index] == '\n' ) { @@ -2074,18 +2074,14 @@ void Image::Annotate( const char *p_text, const Coord &coord, const unsigned int } } -void Image::Timestamp( const char *label, const time_t when, const Coord &coord, const int size ) -{ +void Image::Timestamp( const char *label, const time_t when, const Coord &coord, const int size ) { char time_text[64]; strftime( time_text, sizeof(time_text), "%y/%m/%d %H:%M:%S", localtime( &when ) ); char text[64]; - if ( label ) - { + if ( label ) { snprintf( text, sizeof(text), "%s - %s", label, time_text ); Annotate( text, coord, size ); - } - else - { + } else { Annotate( time_text, coord, size ); } } @@ -2094,21 +2090,21 @@ void Image::Timestamp( const char *label, const time_t when, const Coord &coord, void Image::Colourise(const unsigned int p_reqcolours, const unsigned int p_reqsubpixelorder) { Debug(9, "Colourise: Req colours: %u Req subpixel order: %u Current colours: %u Current subpixel order: %u",p_reqcolours,p_reqsubpixelorder,colours,subpixelorder); - + if ( colours != ZM_COLOUR_GRAY8) { Warning("Target image 
is already colourised, colours: %u",colours); return; } - + if ( p_reqcolours == ZM_COLOUR_RGB32 ) { /* RGB32 */ Rgb* new_buffer = (Rgb*)AllocBuffer(pixels*sizeof(Rgb)); - + const uint8_t *psrc = buffer; Rgb* pdest = new_buffer; Rgb subpixel; Rgb newpixel; - + if ( p_reqsubpixelorder == ZM_SUBPIX_ORDER_ABGR || p_reqsubpixelorder == ZM_SUBPIX_ORDER_ARGB) { /* ARGB\ABGR subpixel order. alpha byte is first (mem+0), so we need to shift the pixel left in the end */ for(unsigned int i=0;i= 35) { /* Use SSSE3 functions */ switch(subpixelorder) { case ZM_SUBPIX_ORDER_BGRA: - ssse3_convert_bgra_gray8(buffer,buffer,pixels); - break; + ssse3_convert_bgra_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_ARGB: - ssse3_convert_argb_gray8(buffer,buffer,pixels); - break; + ssse3_convert_argb_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_ABGR: - ssse3_convert_abgr_gray8(buffer,buffer,pixels); - break; + ssse3_convert_abgr_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_RGBA: default: - ssse3_convert_rgba_gray8(buffer,buffer,pixels); - break; + ssse3_convert_rgba_gray8(buffer,buffer,pixels); + break; } } else { /* Use standard functions */ @@ -2181,29 +2177,29 @@ void Image::DeColourise() { switch(subpixelorder) { case ZM_SUBPIX_ORDER_BGRA: - std_convert_bgra_gray8(buffer,buffer,pixels); - break; + std_convert_bgra_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_ARGB: - std_convert_argb_gray8(buffer,buffer,pixels); - break; + std_convert_argb_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_ABGR: - std_convert_abgr_gray8(buffer,buffer,pixels); - break; + std_convert_abgr_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_RGBA: default: - std_convert_rgba_gray8(buffer,buffer,pixels); - break; + std_convert_rgba_gray8(buffer,buffer,pixels); + break; } } else { /* Assume RGB24 */ switch(subpixelorder) { case ZM_SUBPIX_ORDER_BGR: - std_convert_bgr_gray8(buffer,buffer,pixels); - break; + 
std_convert_bgr_gray8(buffer,buffer,pixels); + break; case ZM_SUBPIX_ORDER_RGB: default: - std_convert_rgb_gray8(buffer,buffer,pixels); - break; + std_convert_rgb_gray8(buffer,buffer,pixels); + break; } } } @@ -2216,10 +2212,10 @@ void Image::Fill( Rgb colour, const Box *limits ) { Panic( "Attempt to fill image with unexpected colours %d", colours ); } - + /* Convert the colour's RGBA subpixel order into the image's subpixel order */ colour = rgb_convert(colour,subpixelorder); - + unsigned int lo_x = limits?limits->Lo().X():0; unsigned int lo_y = limits?limits->Lo().Y():0; unsigned int hi_x = limits?limits->Hi().X():width-1; @@ -2253,7 +2249,7 @@ void Image::Fill( Rgb colour, const Box *limits ) for ( unsigned int y = lo_y; y <= (unsigned int)hi_y; y++ ) { Rgb *p = (Rgb*)&buffer[((y*width)+lo_x)<<2]; - + for ( unsigned int x = lo_x; x <= (unsigned int)hi_x; x++, p++) { /* Fast, copies the entire pixel in a single pass */ @@ -2269,12 +2265,12 @@ void Image::Fill( Rgb colour, int density, const Box *limits ) /* Allow the faster version to be used if density is not used (density=1) */ if(density <= 1) return Fill(colour,limits); - + if ( !(colours == ZM_COLOUR_GRAY8 || colours == ZM_COLOUR_RGB24 || colours == ZM_COLOUR_RGB32 ) ) { Panic( "Attempt to fill image with unexpected colours %d", colours ); } - + /* Convert the colour's RGBA subpixel order into the image's subpixel order */ colour = rgb_convert(colour,subpixelorder); @@ -2323,7 +2319,7 @@ void Image::Fill( Rgb colour, int density, const Box *limits ) } } } - + } /* RGB32 compatible: complete */ @@ -2333,10 +2329,10 @@ void Image::Outline( Rgb colour, const Polygon &polygon ) { Panic( "Attempt to outline image with unexpected colours %d", colours ); } - + /* Convert the colour's RGBA subpixel order into the image's subpixel order */ colour = rgb_convert(colour,subpixelorder); - + int n_coords = polygon.getNumCoords(); for ( int j = 0, i = n_coords-1; j < n_coords; i = j++ ) { @@ -2430,7 +2426,7 @@ void 
Image::Outline( Rgb colour, const Polygon &polygon ) *(Rgb*)(buffer+(((int(round(y))*width)+x)<<2)) = colour; } } - + } } } @@ -2442,7 +2438,7 @@ void Image::Fill( Rgb colour, int density, const Polygon &polygon ) { Panic( "Attempt to fill image with unexpected colours %d", colours ); } - + /* Convert the colour's RGBA subpixel order into the image's subpixel order */ colour = rgb_convert(colour,subpixelorder); @@ -2588,7 +2584,7 @@ void Image::Fill( Rgb colour, const Polygon &polygon ) /* RGB32 compatible: complete */ void Image::Rotate( int angle ) { - + angle %= 360; if ( !angle ) @@ -2599,7 +2595,7 @@ void Image::Rotate( int angle ) { return; } - + unsigned int new_height = height; unsigned int new_width = width; uint8_t* rotate_buffer = AllocBuffer(size); @@ -2607,157 +2603,157 @@ void Image::Rotate( int angle ) switch( angle ) { case 90 : - { - new_height = width; - new_width = height; + { + new_height = width; + new_width = height; - unsigned int line_bytes = new_width*colours; - unsigned char *s_ptr = buffer; + unsigned int line_bytes = new_width*colours; + unsigned char *s_ptr = buffer; - if ( colours == ZM_COLOUR_GRAY8 ) - { - unsigned char *d_ptr; - for ( unsigned int i = new_width; i > 0; i-- ) + if ( colours == ZM_COLOUR_GRAY8 ) { - d_ptr = rotate_buffer+(i-1); - for ( unsigned int j = new_height; j > 0; j-- ) + unsigned char *d_ptr; + for ( unsigned int i = new_width; i > 0; i-- ) { - *d_ptr = *s_ptr++; - d_ptr += line_bytes; + d_ptr = rotate_buffer+(i-1); + for ( unsigned int j = new_height; j > 0; j-- ) + { + *d_ptr = *s_ptr++; + d_ptr += line_bytes; + } } } - } - else if ( colours == ZM_COLOUR_RGB32 ) - { - Rgb* s_rptr = (Rgb*)s_ptr; - Rgb* d_rptr; - for ( unsigned int i = new_width; i > 0; i-- ) + else if ( colours == ZM_COLOUR_RGB32 ) { - d_rptr = (Rgb*)(rotate_buffer+((i-1)<<2)); - for ( unsigned int j = new_height; j > 0; j-- ) + Rgb* s_rptr = (Rgb*)s_ptr; + Rgb* d_rptr; + for ( unsigned int i = new_width; i > 0; i-- ) { - *d_rptr = *s_rptr++; 
- d_rptr += new_width; + d_rptr = (Rgb*)(rotate_buffer+((i-1)<<2)); + for ( unsigned int j = new_height; j > 0; j-- ) + { + *d_rptr = *s_rptr++; + d_rptr += new_width; + } } } - } - else /* Assume RGB24 */ - { - unsigned char *d_ptr; - for ( unsigned int i = new_width; i > 0; i-- ) + else /* Assume RGB24 */ { - d_ptr = rotate_buffer+((i-1)*3); - for ( unsigned int j = new_height; j > 0; j-- ) + unsigned char *d_ptr; + for ( unsigned int i = new_width; i > 0; i-- ) { - *d_ptr = *s_ptr++; - *(d_ptr+1) = *s_ptr++; - *(d_ptr+2) = *s_ptr++; - d_ptr += line_bytes; + d_ptr = rotate_buffer+((i-1)*3); + for ( unsigned int j = new_height; j > 0; j-- ) + { + *d_ptr = *s_ptr++; + *(d_ptr+1) = *s_ptr++; + *(d_ptr+2) = *s_ptr++; + d_ptr += line_bytes; + } } } + break; } - break; - } case 180 : - { - unsigned char *s_ptr = buffer+size; - unsigned char *d_ptr = rotate_buffer; + { + unsigned char *s_ptr = buffer+size; + unsigned char *d_ptr = rotate_buffer; - if ( colours == ZM_COLOUR_GRAY8 ) - { - while( s_ptr > buffer ) + if ( colours == ZM_COLOUR_GRAY8 ) { - s_ptr--; - *d_ptr++ = *s_ptr; - } - } - else if ( colours == ZM_COLOUR_RGB32 ) - { - Rgb* s_rptr = (Rgb*)s_ptr; - Rgb* d_rptr = (Rgb*)d_ptr; - while( s_rptr > (Rgb*)buffer ) - { - s_rptr--; - *d_rptr++ = *s_rptr; - } - } - else /* Assume RGB24 */ - { - while( s_ptr > buffer ) - { - s_ptr -= 3; - *d_ptr++ = *s_ptr; - *d_ptr++ = *(s_ptr+1); - *d_ptr++ = *(s_ptr+2); - } - } - break; - } - case 270 : - { - new_height = width; - new_width = height; - - unsigned int line_bytes = new_width*colours; - unsigned char *s_ptr = buffer+size; - - if ( colours == ZM_COLOUR_GRAY8 ) - { - unsigned char *d_ptr; - for ( unsigned int i = new_width; i > 0; i-- ) - { - d_ptr = rotate_buffer+(i-1); - for ( unsigned int j = new_height; j > 0; j-- ) + while( s_ptr > buffer ) { s_ptr--; - *d_ptr = *s_ptr; - d_ptr += line_bytes; + *d_ptr++ = *s_ptr; } } - } - else if ( colours == ZM_COLOUR_RGB32 ) - { - Rgb* s_rptr = (Rgb*)s_ptr; - Rgb* d_rptr; - for 
( unsigned int i = new_width; i > 0; i-- ) + else if ( colours == ZM_COLOUR_RGB32 ) { - d_rptr = (Rgb*)(rotate_buffer+((i-1)<<2)); - for ( unsigned int j = new_height; j > 0; j-- ) + Rgb* s_rptr = (Rgb*)s_ptr; + Rgb* d_rptr = (Rgb*)d_ptr; + while( s_rptr > (Rgb*)buffer ) { s_rptr--; - *d_rptr = *s_rptr; - d_rptr += new_width; + *d_rptr++ = *s_rptr; } } - } - else /* Assume RGB24 */ - { - unsigned char *d_ptr; - for ( unsigned int i = new_width; i > 0; i-- ) + else /* Assume RGB24 */ { - d_ptr = rotate_buffer+((i-1)*3); - for ( unsigned int j = new_height; j > 0; j-- ) + while( s_ptr > buffer ) { - *(d_ptr+2) = *(--s_ptr); - *(d_ptr+1) = *(--s_ptr); - *d_ptr = *(--s_ptr); - d_ptr += line_bytes; + s_ptr -= 3; + *d_ptr++ = *s_ptr; + *d_ptr++ = *(s_ptr+1); + *d_ptr++ = *(s_ptr+2); } } + break; + } + case 270 : + { + new_height = width; + new_width = height; + + unsigned int line_bytes = new_width*colours; + unsigned char *s_ptr = buffer+size; + + if ( colours == ZM_COLOUR_GRAY8 ) + { + unsigned char *d_ptr; + for ( unsigned int i = new_width; i > 0; i-- ) + { + d_ptr = rotate_buffer+(i-1); + for ( unsigned int j = new_height; j > 0; j-- ) + { + s_ptr--; + *d_ptr = *s_ptr; + d_ptr += line_bytes; + } + } + } + else if ( colours == ZM_COLOUR_RGB32 ) + { + Rgb* s_rptr = (Rgb*)s_ptr; + Rgb* d_rptr; + for ( unsigned int i = new_width; i > 0; i-- ) + { + d_rptr = (Rgb*)(rotate_buffer+((i-1)<<2)); + for ( unsigned int j = new_height; j > 0; j-- ) + { + s_rptr--; + *d_rptr = *s_rptr; + d_rptr += new_width; + } + } + } + else /* Assume RGB24 */ + { + unsigned char *d_ptr; + for ( unsigned int i = new_width; i > 0; i-- ) + { + d_ptr = rotate_buffer+((i-1)*3); + for ( unsigned int j = new_height; j > 0; j-- ) + { + *(d_ptr+2) = *(--s_ptr); + *(d_ptr+1) = *(--s_ptr); + *d_ptr = *(--s_ptr); + d_ptr += line_bytes; + } + } + } + break; } - break; - } } - + AssignDirect( new_width, new_height, colours, subpixelorder, rotate_buffer, size, ZM_BUFTYPE_ZM); - + } /* RGB32 compatible: 
complete */ void Image::Flip( bool leftright ) { uint8_t* flip_buffer = AllocBuffer(size); - + unsigned int line_bytes = width*colours; unsigned int line_bytes2 = 2*line_bytes; if ( leftright ) @@ -2822,9 +2818,9 @@ void Image::Flip( bool leftright ) d_ptr += line_bytes; } } - + AssignDirect( width, height, colours, subpixelorder, flip_buffer, size, ZM_BUFTYPE_ZM); - + } void Image::Scale( unsigned int factor ) @@ -2841,11 +2837,11 @@ void Image::Scale( unsigned int factor ) unsigned int new_width = (width*factor)/ZM_SCALE_BASE; unsigned int new_height = (height*factor)/ZM_SCALE_BASE; - + size_t scale_buffer_size = (new_width+1) * (new_height+1) * colours; - + uint8_t* scale_buffer = AllocBuffer(scale_buffer_size); - + if ( factor > ZM_SCALE_BASE ) { unsigned char *pd = scale_buffer; @@ -2912,7 +2908,7 @@ void Image::Scale( unsigned int factor ) { w_count += factor; w_index = w_count/ZM_SCALE_BASE; - + if ( w_index > last_w_index ) { for ( unsigned int c = 0; c < colours; c++ ) @@ -2932,15 +2928,15 @@ void Image::Scale( unsigned int factor ) new_width = last_w_index; new_height = last_h_index; } - + AssignDirect( new_width, new_height, colours, subpixelorder, scale_buffer, scale_buffer_size, ZM_BUFTYPE_ZM); - + } void Image::Deinterlace_Discard() { /* Simple deinterlacing. Copy the even lines into the odd lines */ - + if ( colours == ZM_COLOUR_GRAY8 ) { const uint8_t *psrc; @@ -2984,16 +2980,16 @@ void Image::Deinterlace_Discard() } else { Error("Deinterlace called with unexpected colours: %d", colours); } - + } void Image::Deinterlace_Linear() { /* Simple deinterlacing. 
The odd lines are average of the line above and line below */ - + const uint8_t *pbelow, *pabove; uint8_t *pcurrent; - + if ( colours == ZM_COLOUR_GRAY8 ) { for (unsigned int y = 1; y < (unsigned int)(height-1); y += 2) @@ -3060,15 +3056,15 @@ void Image::Deinterlace_Linear() } else { Error("Deinterlace called with unexpected colours: %d", colours); } - + } void Image::Deinterlace_Blend() { /* Simple deinterlacing. Blend the fields together. 50% blend */ - + uint8_t *pabove, *pcurrent; - + if ( colours == ZM_COLOUR_GRAY8 ) { for (unsigned int y = 1; y < (unsigned int)height; y += 2) @@ -3117,7 +3113,7 @@ void Image::Deinterlace_Blend() } else { Error("Deinterlace called with unexpected colours: %d", colours); } - + } void Image::Deinterlace_Blend_CustomRatio(int divider) @@ -3127,14 +3123,14 @@ void Image::Deinterlace_Blend_CustomRatio(int divider) /* 2 = 25% blending */ /* 3 = 12.% blending */ /* 4 = 6.25% blending */ - + uint8_t *pabove, *pcurrent; uint8_t subpix1, subpix2; - + if ( divider < 1 || divider > 4 ) { Error("Deinterlace called with invalid blend ratio"); } - + if ( colours == ZM_COLOUR_GRAY8 ) { for (unsigned int y = 1; y < (unsigned int)height; y += 2) @@ -3199,7 +3195,7 @@ void Image::Deinterlace_Blend_CustomRatio(int divider) } else { Error("Deinterlace called with unexpected colours: %d", colours); } - + } @@ -3209,44 +3205,44 @@ void Image::Deinterlace_4Field(const Image* next_image, unsigned int threshold) { Panic( "Attempt to deinterlace different sized images, expected %dx%dx%d %d, got %dx%dx%d %d", width, height, colours, subpixelorder, next_image->width, next_image->height, next_image->colours, next_image->subpixelorder); } - + switch(colours) { case ZM_COLOUR_RGB24: - { - if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { - /* BGR subpixel order */ - std_deinterlace_4field_bgr(buffer, next_image->buffer, threshold, width, height); - } else { - /* Assume RGB subpixel order */ - std_deinterlace_4field_rgb(buffer, next_image->buffer, threshold, width, 
height); - } - break; - } + { + if(subpixelorder == ZM_SUBPIX_ORDER_BGR) { + /* BGR subpixel order */ + std_deinterlace_4field_bgr(buffer, next_image->buffer, threshold, width, height); + } else { + /* Assume RGB subpixel order */ + std_deinterlace_4field_rgb(buffer, next_image->buffer, threshold, width, height); + } + break; + } case ZM_COLOUR_RGB32: - { - if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { - /* ARGB subpixel order */ - (*fptr_deinterlace_4field_argb)(buffer, next_image->buffer, threshold, width, height); - } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { - /* ABGR subpixel order */ - (*fptr_deinterlace_4field_abgr)(buffer, next_image->buffer, threshold, width, height); - } else if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { - /* BGRA subpixel order */ - (*fptr_deinterlace_4field_bgra)(buffer, next_image->buffer, threshold, width, height); - } else { - /* Assume RGBA subpixel order */ - (*fptr_deinterlace_4field_rgba)(buffer, next_image->buffer, threshold, width, height); - } - break; - } + { + if(subpixelorder == ZM_SUBPIX_ORDER_ARGB) { + /* ARGB subpixel order */ + (*fptr_deinterlace_4field_argb)(buffer, next_image->buffer, threshold, width, height); + } else if(subpixelorder == ZM_SUBPIX_ORDER_ABGR) { + /* ABGR subpixel order */ + (*fptr_deinterlace_4field_abgr)(buffer, next_image->buffer, threshold, width, height); + } else if(subpixelorder == ZM_SUBPIX_ORDER_BGRA) { + /* BGRA subpixel order */ + (*fptr_deinterlace_4field_bgra)(buffer, next_image->buffer, threshold, width, height); + } else { + /* Assume RGBA subpixel order */ + (*fptr_deinterlace_4field_rgba)(buffer, next_image->buffer, threshold, width, height); + } + break; + } case ZM_COLOUR_GRAY8: - (*fptr_deinterlace_4field_gray8)(buffer, next_image->buffer, threshold, width, height); - break; + (*fptr_deinterlace_4field_gray8)(buffer, next_image->buffer, threshold, width, height); + break; default: - Panic("Deinterlace_4Field called with unexpected colours: %d",colours); - break; + 
Panic("Deinterlace_4Field called with unexpected colours: %d",colours); + break; } - + } @@ -3261,7 +3257,7 @@ void sse2_fastblend(const uint8_t* col1, const uint8_t* col2, uint8_t* result, u static uint32_t divider = 0; static uint32_t clearmask = 0; static double current_blendpercent = 0.0; - + if(current_blendpercent != blendpercent) { /* Attempt to match the blending percent to one of the possible values */ if(blendpercent < 2.34375) { @@ -3293,29 +3289,29 @@ void sse2_fastblend(const uint8_t* col1, const uint8_t* col2, uint8_t* result, u } __asm__ __volatile__( - "movd %4, %%xmm3\n\t" - "movd %5, %%xmm4\n\t" - "pshufd $0x0, %%xmm3, %%xmm3\n\t" - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x10, %2\n\t" - "sse2_fastblend_iter:\n\t" - "movdqa (%0,%3),%%xmm0\n\t" - "movdqa %%xmm0,%%xmm2\n\t" - "movdqa (%1,%3),%%xmm1\n\t" - "psrlq %%xmm4,%%xmm0\n\t" - "psrlq %%xmm4,%%xmm1\n\t" - "pand %%xmm3,%%xmm1\n\t" - "pand %%xmm3,%%xmm0\n\t" - "psubb %%xmm0,%%xmm1\n\t" - "paddb %%xmm2,%%xmm1\n\t" - "movntdq %%xmm1,(%2,%3)\n\t" - "sub $0x10, %3\n\t" - "jnz sse2_fastblend_iter\n\t" - : - : "r" (col1), "r" (col2), "r" (result), "r" (count), "m" (clearmask), "m" (divider) - : "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "cc", "memory" - ); + "movd %4, %%xmm3\n\t" + "movd %5, %%xmm4\n\t" + "pshufd $0x0, %%xmm3, %%xmm3\n\t" + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x10, %2\n\t" + "sse2_fastblend_iter:\n\t" + "movdqa (%0,%3),%%xmm0\n\t" + "movdqa %%xmm0,%%xmm2\n\t" + "movdqa (%1,%3),%%xmm1\n\t" + "psrlq %%xmm4,%%xmm0\n\t" + "psrlq %%xmm4,%%xmm1\n\t" + "pand %%xmm3,%%xmm1\n\t" + "pand %%xmm3,%%xmm0\n\t" + "psubb %%xmm0,%%xmm1\n\t" + "paddb %%xmm2,%%xmm1\n\t" + "movntdq %%xmm1,(%2,%3)\n\t" + "sub $0x10, %3\n\t" + "jnz sse2_fastblend_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count), "m" (clearmask), "m" (divider) + : "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); 
#endif @@ -3325,7 +3321,7 @@ __attribute__((noinline)) void std_fastblend(const uint8_t* col1, const uint8_t* static int divider = 0; static double current_blendpercent = 0.0; const uint8_t* const max_ptr = result + count; - + if(current_blendpercent != blendpercent) { /* Attempt to match the blending percent to one of the possible values */ if(blendpercent < 2.34375) { @@ -3349,7 +3345,7 @@ __attribute__((noinline)) void std_fastblend(const uint8_t* col1, const uint8_t* } current_blendpercent = blendpercent; } - + while(result < max_ptr) { result[0] = ((col2[0] - col1[0])>>divider) + col1[0]; @@ -3368,7 +3364,7 @@ __attribute__((noinline)) void std_fastblend(const uint8_t* col1, const uint8_t* result[13] = ((col2[13] - col1[13])>>divider) + col1[13]; result[14] = ((col2[14] - col1[14])>>divider) + col1[14]; result[15] = ((col2[15] - col1[15])>>divider) + col1[15]; - + col1 += 16; col2 += 16; result += 16; @@ -3548,10 +3544,10 @@ __attribute__((noinline)) void std_blend(const uint8_t* col1, const uint8_t* col double divide = blendpercent / 100.0; double opacity = 1.0 - divide; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { *result++ = (*col1++ * opacity) + (*col2++ * divide); - + } } @@ -3561,7 +3557,7 @@ __attribute__((noinline)) void std_blend(const uint8_t* col1, const uint8_t* col __attribute__((noinline)) void std_delta8_gray8(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count) { /* Loop unrolling is used to work on 16 bytes (16 grayscale pixels) at a time */ const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { result[0] = abs(col1[0] - col2[0]); result[1] = abs(col1[1] - col2[1]); @@ -3579,7 +3575,7 @@ __attribute__((noinline)) void std_delta8_gray8(const uint8_t* col1, const uint8 result[13] = abs(col1[13] - col2[13]); result[14] = abs(col1[14] - col2[14]); result[15] = abs(col1[15] - col2[15]); - + col1 += 16; col2 += 16; result += 16; @@ -3591,7 +3587,7 @@ 
__attribute__((noinline)) void std_delta8_rgb(const uint8_t* col1, const uint8_t /* Loop unrolling is used to work on 12 bytes (4 rgb24 pixels) at a time */ int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { r = abs(col1[0] - col2[0]); g = abs(col1[1] - col2[1]); @@ -3609,7 +3605,7 @@ __attribute__((noinline)) void std_delta8_rgb(const uint8_t* col1, const uint8_t g = abs(col1[10] - col2[10]); b = abs(col1[11] - col2[11]); result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 12; col2 += 12; result += 4; @@ -3621,7 +3617,7 @@ __attribute__((noinline)) void std_delta8_bgr(const uint8_t* col1, const uint8_t /* Loop unrolling is used to work on 12 bytes (4 rgb24 pixels) at a time */ int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { b = abs(col1[0] - col2[0]); g = abs(col1[1] - col2[1]); @@ -3639,7 +3635,7 @@ __attribute__((noinline)) void std_delta8_bgr(const uint8_t* col1, const uint8_t g = abs(col1[10] - col2[10]); r = abs(col1[11] - col2[11]); result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 12; col2 += 12; result += 4; @@ -3651,7 +3647,7 @@ __attribute__((noinline)) void std_delta8_rgba(const uint8_t* col1, const uint8_ /* Loop unrolling is used to work on 16 bytes (4 rgb32 pixels) at a time */ int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { r = abs(col1[0] - col2[0]); g = abs(col1[1] - col2[1]); @@ -3669,7 +3665,7 @@ __attribute__((noinline)) void std_delta8_rgba(const uint8_t* col1, const uint8_ g = abs(col1[13] - col2[13]); b = abs(col1[14] - col2[14]); result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; col2 += 16; result += 4; @@ -3681,7 +3677,7 @@ __attribute__((noinline)) void std_delta8_bgra(const uint8_t* col1, const uint8_ /* Loop unrolling is used to work on 16 bytes (4 rgb32 pixels) at a time */ int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { b = abs(col1[0] - col2[0]); g 
= abs(col1[1] - col2[1]); @@ -3699,7 +3695,7 @@ __attribute__((noinline)) void std_delta8_bgra(const uint8_t* col1, const uint8_ g = abs(col1[13] - col2[13]); r = abs(col1[14] - col2[14]); result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; col2 += 16; result += 4; @@ -3711,7 +3707,7 @@ __attribute__((noinline)) void std_delta8_argb(const uint8_t* col1, const uint8_ /* Loop unrolling is used to work on 16 bytes (4 rgb32 pixels) at a time */ int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { r = abs(col1[1] - col2[1]); g = abs(col1[2] - col2[2]); @@ -3729,7 +3725,7 @@ __attribute__((noinline)) void std_delta8_argb(const uint8_t* col1, const uint8_ g = abs(col1[14] - col2[14]); b = abs(col1[15] - col2[15]); result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; col2 += 16; result += 4; @@ -3741,7 +3737,7 @@ __attribute__((noinline)) void std_delta8_abgr(const uint8_t* col1, const uint8_ /* Loop unrolling is used to work on 16 bytes (4 rgb32 pixels) at a time */ int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { b = abs(col1[1] - col2[1]); g = abs(col1[2] - col2[2]); @@ -3759,7 +3755,7 @@ __attribute__((noinline)) void std_delta8_abgr(const uint8_t* col1, const uint8_ g = abs(col1[14] - col2[14]); r = abs(col1[15] - col2[15]); result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; col2 += 16; result += 4; @@ -4003,24 +3999,24 @@ void sse2_delta8_gray8(const uint8_t* col1, const uint8_t* col2, uint8_t* result #if ((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE)) __asm__ __volatile__ ( - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x10, %2\n\t" - "sse2_delta8_gray8_iter:\n\t" - "movdqa (%0,%3), %%xmm1\n\t" - "movdqa (%1,%3), %%xmm2\n\t" - "movdqa %%xmm1, %%xmm3\n\t" - "movdqa %%xmm2, %%xmm4\n\t" - "pmaxub %%xmm1, %%xmm2\n\t" - "pminub %%xmm3, %%xmm4\n\t" - "psubb %%xmm4, %%xmm2\n\t" - "movntdq %%xmm2, (%2,%3)\n\t" - 
"sub $0x10, %3\n\t" - "jnz sse2_delta8_gray8_iter\n\t" - : - : "r" (col1), "r" (col2), "r" (result), "r" (count) - : "%xmm1", "%xmm2", "%xmm3", "%xmm4", "cc", "memory" - ); + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x10, %2\n\t" + "sse2_delta8_gray8_iter:\n\t" + "movdqa (%0,%3), %%xmm1\n\t" + "movdqa (%1,%3), %%xmm2\n\t" + "movdqa %%xmm1, %%xmm3\n\t" + "movdqa %%xmm2, %%xmm4\n\t" + "pmaxub %%xmm1, %%xmm2\n\t" + "pminub %%xmm3, %%xmm4\n\t" + "psubb %%xmm4, %%xmm2\n\t" + "movntdq %%xmm2, (%2,%3)\n\t" + "sub $0x10, %3\n\t" + "jnz sse2_delta8_gray8_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count) + : "%xmm1", "%xmm2", "%xmm3", "%xmm4", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4032,53 +4028,53 @@ __attribute__((noinline,__target__("sse2"))) #endif void sse2_delta8_rgba(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count) { #if ((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE)) - + __asm__ __volatile__ ( - "mov $0x1F1F1F1F, %%eax\n\t" - "movd %%eax, %%xmm4\n\t" - "pshufd $0x0, %%xmm4, %%xmm4\n\t" - "mov $0xff, %%eax\n\t" - "movd %%eax, %%xmm0\n\t" - "pshufd $0x0, %%xmm0, %%xmm0\n\t" - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x4, %2\n\t" - "sse2_delta8_rgba_iter:\n\t" - "movdqa (%0,%3,4), %%xmm1\n\t" - "movdqa (%1,%3,4), %%xmm2\n\t" - "psrlq $0x3, %%xmm1\n\t" - "psrlq $0x3, %%xmm2\n\t" - "pand %%xmm4, %%xmm1\n\t" - "pand %%xmm4, %%xmm2\n\t" - "movdqa %%xmm1, %%xmm5\n\t" - "movdqa %%xmm2, %%xmm6\n\t" - "pmaxub %%xmm1, %%xmm2\n\t" - "pminub %%xmm5, %%xmm6\n\t" - "psubb %%xmm6, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm3\n\t" - "psrld $0x8, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm1\n\t" - "pslld $0x2, %%xmm2\n\t" - "paddd %%xmm1, %%xmm2\n\t" - "movdqa %%xmm3, %%xmm1\n\t" - "pand %%xmm0, %%xmm1\n\t" - "paddd %%xmm1, %%xmm1\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "movdqa %%xmm3, %%xmm2\n\t" - "psrld 
$0x10, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "packssdw %%xmm1, %%xmm1\n\t" - "packuswb %%xmm1, %%xmm1\n\t" - "movd %%xmm1, %%eax\n\t" - "movnti %%eax, (%2,%3)\n\t" - "sub $0x4, %3\n\t" - "jnz sse2_delta8_rgba_iter\n\t" - : - : "r" (col1), "r" (col2), "r" (result), "r" (count) - : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" - ); + "mov $0x1F1F1F1F, %%eax\n\t" + "movd %%eax, %%xmm4\n\t" + "pshufd $0x0, %%xmm4, %%xmm4\n\t" + "mov $0xff, %%eax\n\t" + "movd %%eax, %%xmm0\n\t" + "pshufd $0x0, %%xmm0, %%xmm0\n\t" + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x4, %2\n\t" + "sse2_delta8_rgba_iter:\n\t" + "movdqa (%0,%3,4), %%xmm1\n\t" + "movdqa (%1,%3,4), %%xmm2\n\t" + "psrlq $0x3, %%xmm1\n\t" + "psrlq $0x3, %%xmm2\n\t" + "pand %%xmm4, %%xmm1\n\t" + "pand %%xmm4, %%xmm2\n\t" + "movdqa %%xmm1, %%xmm5\n\t" + "movdqa %%xmm2, %%xmm6\n\t" + "pmaxub %%xmm1, %%xmm2\n\t" + "pminub %%xmm5, %%xmm6\n\t" + "psubb %%xmm6, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm3\n\t" + "psrld $0x8, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm1\n\t" + "pslld $0x2, %%xmm2\n\t" + "paddd %%xmm1, %%xmm2\n\t" + "movdqa %%xmm3, %%xmm1\n\t" + "pand %%xmm0, %%xmm1\n\t" + "paddd %%xmm1, %%xmm1\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "movdqa %%xmm3, %%xmm2\n\t" + "psrld $0x10, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "packssdw %%xmm1, %%xmm1\n\t" + "packuswb %%xmm1, %%xmm1\n\t" + "movd %%xmm1, %%eax\n\t" + "movnti %%eax, (%2,%3)\n\t" + "sub $0x4, %3\n\t" + "jnz sse2_delta8_rgba_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count) + : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4090,53 +4086,53 @@ __attribute__((noinline,__target__("sse2"))) #endif void sse2_delta8_bgra(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count) { #if 
((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE)) - + __asm__ __volatile__ ( - "mov $0x1F1F1F1F, %%eax\n\t" - "movd %%eax, %%xmm4\n\t" - "pshufd $0x0, %%xmm4, %%xmm4\n\t" - "mov $0xff, %%eax\n\t" - "movd %%eax, %%xmm0\n\t" - "pshufd $0x0, %%xmm0, %%xmm0\n\t" - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x4, %2\n\t" - "sse2_delta8_bgra_iter:\n\t" - "movdqa (%0,%3,4), %%xmm1\n\t" - "movdqa (%1,%3,4), %%xmm2\n\t" - "psrlq $0x3, %%xmm1\n\t" - "psrlq $0x3, %%xmm2\n\t" - "pand %%xmm4, %%xmm1\n\t" - "pand %%xmm4, %%xmm2\n\t" - "movdqa %%xmm1, %%xmm5\n\t" - "movdqa %%xmm2, %%xmm6\n\t" - "pmaxub %%xmm1, %%xmm2\n\t" - "pminub %%xmm5, %%xmm6\n\t" - "psubb %%xmm6, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm3\n\t" - "psrld $0x8, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm1\n\t" - "pslld $0x2, %%xmm2\n\t" - "paddd %%xmm1, %%xmm2\n\t" - "movdqa %%xmm3, %%xmm1\n\t" - "pand %%xmm0, %%xmm1\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "movdqa %%xmm3, %%xmm2\n\t" - "psrld $0x10, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "paddd %%xmm2, %%xmm2\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "packssdw %%xmm1, %%xmm1\n\t" - "packuswb %%xmm1, %%xmm1\n\t" - "movd %%xmm1, %%eax\n\t" - "movnti %%eax, (%2,%3)\n\t" - "sub $0x4, %3\n\t" - "jnz sse2_delta8_bgra_iter\n\t" - : - : "r" (col1), "r" (col2), "r" (result), "r" (count) - : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" - ); + "mov $0x1F1F1F1F, %%eax\n\t" + "movd %%eax, %%xmm4\n\t" + "pshufd $0x0, %%xmm4, %%xmm4\n\t" + "mov $0xff, %%eax\n\t" + "movd %%eax, %%xmm0\n\t" + "pshufd $0x0, %%xmm0, %%xmm0\n\t" + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x4, %2\n\t" + "sse2_delta8_bgra_iter:\n\t" + "movdqa (%0,%3,4), %%xmm1\n\t" + "movdqa (%1,%3,4), %%xmm2\n\t" + "psrlq $0x3, %%xmm1\n\t" + "psrlq $0x3, %%xmm2\n\t" + "pand %%xmm4, %%xmm1\n\t" + "pand %%xmm4, %%xmm2\n\t" + "movdqa %%xmm1, %%xmm5\n\t" + "movdqa %%xmm2, %%xmm6\n\t" + "pmaxub %%xmm1, %%xmm2\n\t" 
+ "pminub %%xmm5, %%xmm6\n\t" + "psubb %%xmm6, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm3\n\t" + "psrld $0x8, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm1\n\t" + "pslld $0x2, %%xmm2\n\t" + "paddd %%xmm1, %%xmm2\n\t" + "movdqa %%xmm3, %%xmm1\n\t" + "pand %%xmm0, %%xmm1\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "movdqa %%xmm3, %%xmm2\n\t" + "psrld $0x10, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "paddd %%xmm2, %%xmm2\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "packssdw %%xmm1, %%xmm1\n\t" + "packuswb %%xmm1, %%xmm1\n\t" + "movd %%xmm1, %%eax\n\t" + "movnti %%eax, (%2,%3)\n\t" + "sub $0x4, %3\n\t" + "jnz sse2_delta8_bgra_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count) + : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4148,54 +4144,54 @@ __attribute__((noinline,__target__("sse2"))) #endif void sse2_delta8_argb(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count) { #if ((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE)) - + __asm__ __volatile__ ( - "mov $0x1F1F1F1F, %%eax\n\t" - "movd %%eax, %%xmm4\n\t" - "pshufd $0x0, %%xmm4, %%xmm4\n\t" - "mov $0xff, %%eax\n\t" - "movd %%eax, %%xmm0\n\t" - "pshufd $0x0, %%xmm0, %%xmm0\n\t" - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x4, %2\n\t" - "sse2_delta8_argb_iter:\n\t" - "movdqa (%0,%3,4), %%xmm1\n\t" - "movdqa (%1,%3,4), %%xmm2\n\t" - "psrlq $0x3, %%xmm1\n\t" - "psrlq $0x3, %%xmm2\n\t" - "pand %%xmm4, %%xmm1\n\t" - "pand %%xmm4, %%xmm2\n\t" - "movdqa %%xmm1, %%xmm5\n\t" - "movdqa %%xmm2, %%xmm6\n\t" - "pmaxub %%xmm1, %%xmm2\n\t" - "pminub %%xmm5, %%xmm6\n\t" - "psubb %%xmm6, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm3\n\t" - "psrld $0x10, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm1\n\t" - "pslld $0x2, %%xmm2\n\t" - "paddd %%xmm1, %%xmm2\n\t" - "movdqa %%xmm3, %%xmm1\n\t" - "psrld $0x8, 
%%xmm1\n\t" - "pand %%xmm0, %%xmm1\n\t" - "paddd %%xmm1, %%xmm1\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "movdqa %%xmm3, %%xmm2\n\t" - "psrld $0x18, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "packssdw %%xmm1, %%xmm1\n\t" - "packuswb %%xmm1, %%xmm1\n\t" - "movd %%xmm1, %%eax\n\t" - "movnti %%eax, (%2,%3)\n\t" - "sub $0x4, %3\n\t" - "jnz sse2_delta8_argb_iter\n\t" - : - : "r" (col1), "r" (col2), "r" (result), "r" (count) - : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" - ); + "mov $0x1F1F1F1F, %%eax\n\t" + "movd %%eax, %%xmm4\n\t" + "pshufd $0x0, %%xmm4, %%xmm4\n\t" + "mov $0xff, %%eax\n\t" + "movd %%eax, %%xmm0\n\t" + "pshufd $0x0, %%xmm0, %%xmm0\n\t" + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x4, %2\n\t" + "sse2_delta8_argb_iter:\n\t" + "movdqa (%0,%3,4), %%xmm1\n\t" + "movdqa (%1,%3,4), %%xmm2\n\t" + "psrlq $0x3, %%xmm1\n\t" + "psrlq $0x3, %%xmm2\n\t" + "pand %%xmm4, %%xmm1\n\t" + "pand %%xmm4, %%xmm2\n\t" + "movdqa %%xmm1, %%xmm5\n\t" + "movdqa %%xmm2, %%xmm6\n\t" + "pmaxub %%xmm1, %%xmm2\n\t" + "pminub %%xmm5, %%xmm6\n\t" + "psubb %%xmm6, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm3\n\t" + "psrld $0x10, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm1\n\t" + "pslld $0x2, %%xmm2\n\t" + "paddd %%xmm1, %%xmm2\n\t" + "movdqa %%xmm3, %%xmm1\n\t" + "psrld $0x8, %%xmm1\n\t" + "pand %%xmm0, %%xmm1\n\t" + "paddd %%xmm1, %%xmm1\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "movdqa %%xmm3, %%xmm2\n\t" + "psrld $0x18, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "packssdw %%xmm1, %%xmm1\n\t" + "packuswb %%xmm1, %%xmm1\n\t" + "movd %%xmm1, %%eax\n\t" + "movnti %%eax, (%2,%3)\n\t" + "sub $0x4, %3\n\t" + "jnz sse2_delta8_argb_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count) + : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4207,54 +4203,54 
@@ __attribute__((noinline,__target__("sse2"))) #endif void sse2_delta8_abgr(const uint8_t* col1, const uint8_t* col2, uint8_t* result, unsigned long count) { #if ((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE)) - + __asm__ __volatile__ ( - "mov $0x1F1F1F1F, %%eax\n\t" - "movd %%eax, %%xmm4\n\t" - "pshufd $0x0, %%xmm4, %%xmm4\n\t" - "mov $0xff, %%eax\n\t" - "movd %%eax, %%xmm0\n\t" - "pshufd $0x0, %%xmm0, %%xmm0\n\t" - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x4, %2\n\t" - "sse2_delta8_abgr_iter:\n\t" - "movdqa (%0,%3,4), %%xmm1\n\t" - "movdqa (%1,%3,4), %%xmm2\n\t" - "psrlq $0x3, %%xmm1\n\t" - "psrlq $0x3, %%xmm2\n\t" - "pand %%xmm4, %%xmm1\n\t" - "pand %%xmm4, %%xmm2\n\t" - "movdqa %%xmm1, %%xmm5\n\t" - "movdqa %%xmm2, %%xmm6\n\t" - "pmaxub %%xmm1, %%xmm2\n\t" - "pminub %%xmm5, %%xmm6\n\t" - "psubb %%xmm6, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm3\n\t" - "psrld $0x10, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "movdqa %%xmm2, %%xmm1\n\t" - "pslld $0x2, %%xmm2\n\t" - "paddd %%xmm1, %%xmm2\n\t" - "movdqa %%xmm3, %%xmm1\n\t" - "psrld $0x8, %%xmm1\n\t" - "pand %%xmm0, %%xmm1\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "movdqa %%xmm3, %%xmm2\n\t" - "psrld $0x18, %%xmm2\n\t" - "pand %%xmm0, %%xmm2\n\t" - "paddd %%xmm2, %%xmm2\n\t" - "paddd %%xmm2, %%xmm1\n\t" - "packssdw %%xmm1, %%xmm1\n\t" - "packuswb %%xmm1, %%xmm1\n\t" - "movd %%xmm1, %%eax\n\t" - "movnti %%eax, (%2,%3)\n\t" - "sub $0x4, %3\n\t" - "jnz sse2_delta8_abgr_iter\n\t" - : - : "r" (col1), "r" (col2), "r" (result), "r" (count) - : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" - ); + "mov $0x1F1F1F1F, %%eax\n\t" + "movd %%eax, %%xmm4\n\t" + "pshufd $0x0, %%xmm4, %%xmm4\n\t" + "mov $0xff, %%eax\n\t" + "movd %%eax, %%xmm0\n\t" + "pshufd $0x0, %%xmm0, %%xmm0\n\t" + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x4, %2\n\t" + "sse2_delta8_abgr_iter:\n\t" + "movdqa (%0,%3,4), %%xmm1\n\t" + "movdqa (%1,%3,4), %%xmm2\n\t" + "psrlq 
$0x3, %%xmm1\n\t" + "psrlq $0x3, %%xmm2\n\t" + "pand %%xmm4, %%xmm1\n\t" + "pand %%xmm4, %%xmm2\n\t" + "movdqa %%xmm1, %%xmm5\n\t" + "movdqa %%xmm2, %%xmm6\n\t" + "pmaxub %%xmm1, %%xmm2\n\t" + "pminub %%xmm5, %%xmm6\n\t" + "psubb %%xmm6, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm3\n\t" + "psrld $0x10, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "movdqa %%xmm2, %%xmm1\n\t" + "pslld $0x2, %%xmm2\n\t" + "paddd %%xmm1, %%xmm2\n\t" + "movdqa %%xmm3, %%xmm1\n\t" + "psrld $0x8, %%xmm1\n\t" + "pand %%xmm0, %%xmm1\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "movdqa %%xmm3, %%xmm2\n\t" + "psrld $0x18, %%xmm2\n\t" + "pand %%xmm0, %%xmm2\n\t" + "paddd %%xmm2, %%xmm2\n\t" + "paddd %%xmm2, %%xmm1\n\t" + "packssdw %%xmm1, %%xmm1\n\t" + "packuswb %%xmm1, %%xmm1\n\t" + "movd %%xmm1, %%eax\n\t" + "movnti %%eax, (%2,%3)\n\t" + "sub $0x4, %3\n\t" + "jnz sse2_delta8_abgr_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count) + : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4274,36 +4270,36 @@ void ssse3_delta8_rgb32(const uint8_t* col1, const uint8_t* col2, uint8_t* resul /* XMM4 - divide mask */ __asm__ __volatile__ ( - "mov $0x1F1F1F1F, %%eax\n\t" - "movd %%eax, %%xmm4\n\t" - "pshufd $0x0, %%xmm4, %%xmm4\n\t" - "mov %4, %%eax\n\t" - "movd %%eax, %%xmm3\n\t" - "pshufd $0x0, %%xmm3, %%xmm3\n\t" - "pxor %%xmm0, %%xmm0\n\t" - "sub $0x10, %0\n\t" - "sub $0x10, %1\n\t" - "sub $0x4, %2\n\t" - "ssse3_delta8_rgb32_iter:\n\t" - "movdqa (%0,%3,4), %%xmm1\n\t" - "movdqa (%1,%3,4), %%xmm2\n\t" - "psrlq $0x3, %%xmm1\n\t" - "psrlq $0x3, %%xmm2\n\t" - "pand %%xmm4, %%xmm1\n\t" - "pand %%xmm4, %%xmm2\n\t" - "psubb %%xmm2, %%xmm1\n\t" - "pabsb %%xmm1, %%xmm1\n\t" - "pmaddubsw %%xmm3, %%xmm1\n\t" - "phaddw %%xmm0, %%xmm1\n\t" - "packuswb %%xmm1, %%xmm1\n\t" - "movd %%xmm1, %%eax\n\t" - "movnti %%eax, (%2,%3)\n\t" - "sub $0x4, %3\n\t" - "jnz ssse3_delta8_rgb32_iter\n\t" - : - : "r" 
(col1), "r" (col2), "r" (result), "r" (count), "g" (multiplier) - : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "cc", "memory" - ); + "mov $0x1F1F1F1F, %%eax\n\t" + "movd %%eax, %%xmm4\n\t" + "pshufd $0x0, %%xmm4, %%xmm4\n\t" + "mov %4, %%eax\n\t" + "movd %%eax, %%xmm3\n\t" + "pshufd $0x0, %%xmm3, %%xmm3\n\t" + "pxor %%xmm0, %%xmm0\n\t" + "sub $0x10, %0\n\t" + "sub $0x10, %1\n\t" + "sub $0x4, %2\n\t" + "ssse3_delta8_rgb32_iter:\n\t" + "movdqa (%0,%3,4), %%xmm1\n\t" + "movdqa (%1,%3,4), %%xmm2\n\t" + "psrlq $0x3, %%xmm1\n\t" + "psrlq $0x3, %%xmm2\n\t" + "pand %%xmm4, %%xmm1\n\t" + "pand %%xmm4, %%xmm2\n\t" + "psubb %%xmm2, %%xmm1\n\t" + "pabsb %%xmm1, %%xmm1\n\t" + "pmaddubsw %%xmm3, %%xmm1\n\t" + "phaddw %%xmm0, %%xmm1\n\t" + "packuswb %%xmm1, %%xmm1\n\t" + "movd %%xmm1, %%eax\n\t" + "movnti %%eax, (%2,%3)\n\t" + "sub $0x4, %3\n\t" + "jnz ssse3_delta8_rgb32_iter\n\t" + : + : "r" (col1), "r" (col2), "r" (result), "r" (count), "g" (multiplier) + : "%eax", "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4336,7 +4332,7 @@ void ssse3_delta8_abgr(const uint8_t* col1, const uint8_t* col2, uint8_t* result __attribute__((noinline)) void std_convert_rgb_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { unsigned int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { r = col1[0]; g = col1[1]; @@ -4354,7 +4350,7 @@ __attribute__((noinline)) void std_convert_rgb_gray8(const uint8_t* col1, uint8_ g = col1[10]; b = col1[11]; result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 12; result += 4; } @@ -4364,7 +4360,7 @@ __attribute__((noinline)) void std_convert_rgb_gray8(const uint8_t* col1, uint8_ __attribute__((noinline)) void std_convert_bgr_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { unsigned int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { b = col1[0]; g = 
col1[1]; @@ -4382,7 +4378,7 @@ __attribute__((noinline)) void std_convert_bgr_gray8(const uint8_t* col1, uint8_ g = col1[10]; r = col1[11]; result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 12; result += 4; } @@ -4392,7 +4388,7 @@ __attribute__((noinline)) void std_convert_bgr_gray8(const uint8_t* col1, uint8_ __attribute__((noinline)) void std_convert_rgba_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { unsigned int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { r = col1[0]; g = col1[1]; @@ -4410,7 +4406,7 @@ __attribute__((noinline)) void std_convert_rgba_gray8(const uint8_t* col1, uint8 g = col1[13]; b = col1[14]; result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; result += 4; } @@ -4420,7 +4416,7 @@ __attribute__((noinline)) void std_convert_rgba_gray8(const uint8_t* col1, uint8 __attribute__((noinline)) void std_convert_bgra_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { unsigned int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { b = col1[0]; g = col1[1]; @@ -4438,7 +4434,7 @@ __attribute__((noinline)) void std_convert_bgra_gray8(const uint8_t* col1, uint8 g = col1[13]; r = col1[14]; result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; result += 4; } @@ -4448,7 +4444,7 @@ __attribute__((noinline)) void std_convert_bgra_gray8(const uint8_t* col1, uint8 __attribute__((noinline)) void std_convert_argb_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { unsigned int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { r = col1[1]; g = col1[2]; @@ -4466,7 +4462,7 @@ __attribute__((noinline)) void std_convert_argb_gray8(const uint8_t* col1, uint8 g = col1[14]; b = col1[15]; result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; result += 4; } @@ -4476,7 +4472,7 @@ __attribute__((noinline)) void std_convert_argb_gray8(const uint8_t* col1, uint8 
__attribute__((noinline)) void std_convert_abgr_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { unsigned int r,g,b; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { b = col1[1]; g = col1[2]; @@ -4494,7 +4490,7 @@ __attribute__((noinline)) void std_convert_abgr_gray8(const uint8_t* col1, uint8 g = col1[14]; r = col1[15]; result[3] = (r + r + b + g + g + g + g + g)>>3; - + col1 += 16; result += 4; } @@ -4504,7 +4500,7 @@ __attribute__((noinline)) void std_convert_abgr_gray8(const uint8_t* col1, uint8 __attribute__((noinline)) void std_convert_yuyv_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { const uint16_t* yuvbuf = (const uint16_t*)col1; const uint8_t* const max_ptr = result + count; - + while(result < max_ptr) { result[0] = (uint8_t)yuvbuf[0]; result[1] = (uint8_t)yuvbuf[1]; @@ -4522,7 +4518,7 @@ __attribute__((noinline)) void std_convert_yuyv_gray8(const uint8_t* col1, uint8 result[13] = (uint8_t)yuvbuf[13]; result[14] = (uint8_t)yuvbuf[14]; result[15] = (uint8_t)yuvbuf[15]; - + yuvbuf += 16; result += 16; } @@ -4541,30 +4537,30 @@ void ssse3_convert_rgb32_gray8(const uint8_t* col1, uint8_t* result, unsigned lo /* XMM4 - divide mask */ __asm__ __volatile__ ( - "mov $0x1F1F1F1F, %%eax\n\t" - "movd %%eax, %%xmm4\n\t" - "pshufd $0x0, %%xmm4, %%xmm4\n\t" - "mov %3, %%eax\n\t" - "movd %%eax, %%xmm3\n\t" - "pshufd $0x0, %%xmm3, %%xmm3\n\t" - "pxor %%xmm0, %%xmm0\n\t" - "sub $0x10, %0\n\t" - "sub $0x4, %1\n\t" - "ssse3_convert_rgb32_gray8_iter:\n\t" - "movdqa (%0,%2,4), %%xmm1\n\t" - "psrlq $0x3, %%xmm1\n\t" - "pand %%xmm4, %%xmm1\n\t" - "pmaddubsw %%xmm3, %%xmm1\n\t" - "phaddw %%xmm0, %%xmm1\n\t" - "packuswb %%xmm1, %%xmm1\n\t" - "movd %%xmm1, %%eax\n\t" - "movnti %%eax, (%1,%2)\n\t" - "sub $0x4, %2\n\t" - "jnz ssse3_convert_rgb32_gray8_iter\n\t" - : - : "r" (col1), "r" (result), "r" (count), "g" (multiplier) - : "%eax", "%xmm0", "%xmm1", "%xmm3", "%xmm4", "cc", "memory" - ); + "mov $0x1F1F1F1F, 
%%eax\n\t" + "movd %%eax, %%xmm4\n\t" + "pshufd $0x0, %%xmm4, %%xmm4\n\t" + "mov %3, %%eax\n\t" + "movd %%eax, %%xmm3\n\t" + "pshufd $0x0, %%xmm3, %%xmm3\n\t" + "pxor %%xmm0, %%xmm0\n\t" + "sub $0x10, %0\n\t" + "sub $0x4, %1\n\t" + "ssse3_convert_rgb32_gray8_iter:\n\t" + "movdqa (%0,%2,4), %%xmm1\n\t" + "psrlq $0x3, %%xmm1\n\t" + "pand %%xmm4, %%xmm1\n\t" + "pmaddubsw %%xmm3, %%xmm1\n\t" + "phaddw %%xmm0, %%xmm1\n\t" + "packuswb %%xmm1, %%xmm1\n\t" + "movd %%xmm1, %%eax\n\t" + "movnti %%eax, (%1,%2)\n\t" + "sub $0x4, %2\n\t" + "jnz ssse3_convert_rgb32_gray8_iter\n\t" + : + : "r" (col1), "r" (result), "r" (count), "g" (multiplier) + : "%eax", "%xmm0", "%xmm1", "%xmm3", "%xmm4", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4597,10 +4593,10 @@ __attribute__((noinline,__target__("ssse3"))) void ssse3_convert_yuyv_gray8(const uint8_t* col1, uint8_t* result, unsigned long count) { #if ((defined(__i386__) || defined(__x86_64__) || defined(ZM_KEEP_SSE)) && !defined(ZM_STRIP_SSE)) unsigned long i = 0; - + __attribute__((aligned(16))) static const uint8_t movemask1[16] = {0,2,4,6,8,10,12,14,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF}; __attribute__((aligned(16))) static const uint8_t movemask2[16] = {0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0,2,4,6,8,10,12,14}; - + /* XMM0 - General purpose */ /* XMM1 - General purpose */ /* XMM2 - unused */ @@ -4611,28 +4607,28 @@ void ssse3_convert_yuyv_gray8(const uint8_t* col1, uint8_t* result, unsigned lon /* XMM7 - unused */ __asm__ __volatile__ ( - "movdqa %4, %%xmm3\n\t" - "movdqa %5, %%xmm4\n\t" - "algo_ssse3_convert_yuyv_gray8:\n\t" - "movdqa (%0), %%xmm0\n\t" - "pshufb %%xmm3, %%xmm0\n\t" - "movdqa 0x10(%0), %%xmm1\n\t" - "pshufb %%xmm4, %%xmm1\n\t" - "por %%xmm1, %%xmm0\n\t" - "movntdq %%xmm0, (%1)\n\t" - "add $0x10, %3\n\t" - "add $0x10, %1\n\t" - "add $0x20, %0\n\t" - "cmp %2, %3\n\t" - "jb algo_ssse3_convert_yuyv_gray8\n\t" - : + "movdqa %4, %%xmm3\n\t" + "movdqa %5, %%xmm4\n\t" 
+ "algo_ssse3_convert_yuyv_gray8:\n\t" + "movdqa (%0), %%xmm0\n\t" + "pshufb %%xmm3, %%xmm0\n\t" + "movdqa 0x10(%0), %%xmm1\n\t" + "pshufb %%xmm4, %%xmm1\n\t" + "por %%xmm1, %%xmm0\n\t" + "movntdq %%xmm0, (%1)\n\t" + "add $0x10, %3\n\t" + "add $0x10, %1\n\t" + "add $0x20, %0\n\t" + "cmp %2, %3\n\t" + "jb algo_ssse3_convert_yuyv_gray8\n\t" + : #if (defined(_DEBUG) && !defined(__x86_64__)) /* Use one less register to allow compilation to success on 32bit with omit frame pointer disabled */ - : "r" (col1), "r" (result), "m" (count), "r" (i), "m" (*movemask1), "m" (*movemask2) + : "r" (col1), "r" (result), "m" (count), "r" (i), "m" (*movemask1), "m" (*movemask2) #else - : "r" (col1), "r" (result), "r" (count), "r" (i), "m" (*movemask1), "m" (*movemask2) + : "r" (col1), "r" (result), "r" (count), "r" (i), "m" (*movemask1), "m" (*movemask2) #endif - : "%xmm3", "%xmm4", "cc", "memory" - ); + : "%xmm3", "%xmm4", "cc", "memory" + ); #else Panic("SSE function called on a non x86\\x86-64 platform"); #endif @@ -4651,11 +4647,11 @@ __attribute__((noinline)) void zm_convert_yuyv_rgb(const uint8_t* col1, uint8_t* r = y1 + r_v_table[v]; g = y1 - (g_u_table[u]+g_v_table[v]); b = y1 + b_u_table[u]; - + result[0] = r<0?0:(r>255?255:r); result[1] = g<0?0:(g>255?255:g); result[2] = b<0?0:(b>255?255:b); - + r = y2 + r_v_table[v]; g = y2 - (g_u_table[u]+g_v_table[v]); b = y2 + b_u_table[u]; @@ -4664,7 +4660,7 @@ __attribute__((noinline)) void zm_convert_yuyv_rgb(const uint8_t* col1, uint8_t* result[4] = g<0?0:(g>255?255:g); result[5] = b<0?0:(b>255?255:b); } - + } /* YUYV to RGBA - modified the one above */ @@ -4680,11 +4676,11 @@ __attribute__((noinline)) void zm_convert_yuyv_rgba(const uint8_t* col1, uint8_t r = y1 + r_v_table[v]; g = y1 - (g_u_table[u]+g_v_table[v]); b = y1 + b_u_table[u]; - + result[0] = r<0?0:(r>255?255:r); result[1] = g<0?0:(g>255?255:g); result[2] = b<0?0:(b>255?255:b); - + r = y2 + r_v_table[v]; g = y2 - (g_u_table[u]+g_v_table[v]); b = y2 + b_u_table[u]; @@ 
-4693,7 +4689,7 @@ __attribute__((noinline)) void zm_convert_yuyv_rgba(const uint8_t* col1, uint8_t result[5] = g<0?0:(g>255?255:g); result[6] = b<0?0:(b>255?255:b); } - + } /* RGB555 to RGB24 - relocated from zm_local_camera.cpp */ @@ -4780,9 +4776,9 @@ __attribute__((noinline)) void std_deinterlace_4field_gray8(uint8_t* col1, uint8 pabove += width; pnabove += width; pbelow += width; - + } - + /* Special case for the last line */ max_ptr2 = pcurrent + width; while(pcurrent < max_ptr2) { @@ -4839,29 +4835,29 @@ __attribute__((noinline)) void std_deinterlace_4field_rgb(uint8_t* col1, uint8_t pabove += row_width; pnabove += row_width; pbelow += row_width; - + } - + /* Special case for the last line */ max_ptr2 = pcurrent + row_width; while(pcurrent < max_ptr2) { - r = abs(pnabove[0] - pabove[0]); - g = abs(pnabove[1] - pabove[1]); - b = abs(pnabove[2] - pabove[2]); - delta1 = (r + r + b + g + g + g + g + g)>>3; - r = abs(pncurrent[0] - pcurrent[0]); - g = abs(pncurrent[1] - pcurrent[1]); - b = abs(pncurrent[2] - pcurrent[2]); - delta2 = (r + r + b + g + g + g + g + g)>>3; - if(((delta1 + delta2) >> 1) >= threshold) { - pcurrent[0] = pabove[0]; - pcurrent[1] = pabove[1]; - pcurrent[2] = pabove[2]; - } - pabove += 3; - pnabove += 3; - pcurrent += 3; - pncurrent += 3; + r = abs(pnabove[0] - pabove[0]); + g = abs(pnabove[1] - pabove[1]); + b = abs(pnabove[2] - pabove[2]); + delta1 = (r + r + b + g + g + g + g + g)>>3; + r = abs(pncurrent[0] - pcurrent[0]); + g = abs(pncurrent[1] - pcurrent[1]); + b = abs(pncurrent[2] - pcurrent[2]); + delta2 = (r + r + b + g + g + g + g + g)>>3; + if(((delta1 + delta2) >> 1) >= threshold) { + pcurrent[0] = pabove[0]; + pcurrent[1] = pabove[1]; + pcurrent[2] = pabove[2]; + } + pabove += 3; + pnabove += 3; + pcurrent += 3; + pncurrent += 3; } } @@ -4908,29 +4904,29 @@ __attribute__((noinline)) void std_deinterlace_4field_bgr(uint8_t* col1, uint8_t pabove += row_width; pnabove += row_width; pbelow += row_width; - + } - + /* Special case for 
the last line */ max_ptr2 = pcurrent + row_width; while(pcurrent < max_ptr2) { - b = abs(pnabove[0] - pabove[0]); - g = abs(pnabove[1] - pabove[1]); - r = abs(pnabove[2] - pabove[2]); - delta1 = (r + r + b + g + g + g + g + g)>>3; - b = abs(pncurrent[0] - pcurrent[0]); - g = abs(pncurrent[1] - pcurrent[1]); - r = abs(pncurrent[2] - pcurrent[2]); - delta2 = (r + r + b + g + g + g + g + g)>>3; - if(((delta1 + delta2) >> 1) >= threshold) { - pcurrent[0] = pabove[0]; - pcurrent[1] = pabove[1]; - pcurrent[2] = pabove[2]; - } - pabove += 3; - pnabove += 3; - pcurrent += 3; - pncurrent += 3; + b = abs(pnabove[0] - pabove[0]); + g = abs(pnabove[1] - pabove[1]); + r = abs(pnabove[2] - pabove[2]); + delta1 = (r + r + b + g + g + g + g + g)>>3; + b = abs(pncurrent[0] - pcurrent[0]); + g = abs(pncurrent[1] - pcurrent[1]); + r = abs(pncurrent[2] - pcurrent[2]); + delta2 = (r + r + b + g + g + g + g + g)>>3; + if(((delta1 + delta2) >> 1) >= threshold) { + pcurrent[0] = pabove[0]; + pcurrent[1] = pabove[1]; + pcurrent[2] = pabove[2]; + } + pabove += 3; + pnabove += 3; + pcurrent += 3; + pncurrent += 3; } } @@ -4977,29 +4973,29 @@ __attribute__((noinline)) void std_deinterlace_4field_rgba(uint8_t* col1, uint8_ pabove += row_width; pnabove += row_width; pbelow += row_width; - + } - + /* Special case for the last line */ max_ptr2 = pcurrent + row_width; while(pcurrent < max_ptr2) { - r = abs(pnabove[0] - pabove[0]); - g = abs(pnabove[1] - pabove[1]); - b = abs(pnabove[2] - pabove[2]); - delta1 = (r + r + b + g + g + g + g + g)>>3; - r = abs(pncurrent[0] - pcurrent[0]); - g = abs(pncurrent[1] - pcurrent[1]); - b = abs(pncurrent[2] - pcurrent[2]); - delta2 = (r + r + b + g + g + g + g + g)>>3; - if(((delta1 + delta2) >> 1) >= threshold) { - pcurrent[0] = pabove[0]; - pcurrent[1] = pabove[1]; - pcurrent[2] = pabove[2]; - } - pabove += 4; - pnabove += 4; - pcurrent += 4; - pncurrent += 4; + r = abs(pnabove[0] - pabove[0]); + g = abs(pnabove[1] - pabove[1]); + b = abs(pnabove[2] - 
pabove[2]); + delta1 = (r + r + b + g + g + g + g + g)>>3; + r = abs(pncurrent[0] - pcurrent[0]); + g = abs(pncurrent[1] - pcurrent[1]); + b = abs(pncurrent[2] - pcurrent[2]); + delta2 = (r + r + b + g + g + g + g + g)>>3; + if(((delta1 + delta2) >> 1) >= threshold) { + pcurrent[0] = pabove[0]; + pcurrent[1] = pabove[1]; + pcurrent[2] = pabove[2]; + } + pabove += 4; + pnabove += 4; + pcurrent += 4; + pncurrent += 4; } } @@ -5046,29 +5042,29 @@ __attribute__((noinline)) void std_deinterlace_4field_bgra(uint8_t* col1, uint8_ pabove += row_width; pnabove += row_width; pbelow += row_width; - + } - + /* Special case for the last line */ max_ptr2 = pcurrent + row_width; while(pcurrent < max_ptr2) { - b = abs(pnabove[0] - pabove[0]); - g = abs(pnabove[1] - pabove[1]); - r = abs(pnabove[2] - pabove[2]); - delta1 = (r + r + b + g + g + g + g + g)>>3; - b = abs(pncurrent[0] - pcurrent[0]); - g = abs(pncurrent[1] - pcurrent[1]); - r = abs(pncurrent[2] - pcurrent[2]); - delta2 = (r + r + b + g + g + g + g + g)>>3; - if(((delta1 + delta2) >> 1) >= threshold) { - pcurrent[0] = pabove[0]; - pcurrent[1] = pabove[1]; - pcurrent[2] = pabove[2]; - } - pabove += 4; - pnabove += 4; - pcurrent += 4; - pncurrent += 4; + b = abs(pnabove[0] - pabove[0]); + g = abs(pnabove[1] - pabove[1]); + r = abs(pnabove[2] - pabove[2]); + delta1 = (r + r + b + g + g + g + g + g)>>3; + b = abs(pncurrent[0] - pcurrent[0]); + g = abs(pncurrent[1] - pcurrent[1]); + r = abs(pncurrent[2] - pcurrent[2]); + delta2 = (r + r + b + g + g + g + g + g)>>3; + if(((delta1 + delta2) >> 1) >= threshold) { + pcurrent[0] = pabove[0]; + pcurrent[1] = pabove[1]; + pcurrent[2] = pabove[2]; + } + pabove += 4; + pnabove += 4; + pcurrent += 4; + pncurrent += 4; } } @@ -5115,29 +5111,29 @@ __attribute__((noinline)) void std_deinterlace_4field_argb(uint8_t* col1, uint8_ pabove += row_width; pnabove += row_width; pbelow += row_width; - + } - + /* Special case for the last line */ max_ptr2 = pcurrent + row_width; while(pcurrent < 
max_ptr2) { - r = abs(pnabove[1] - pabove[1]); - g = abs(pnabove[2] - pabove[2]); - b = abs(pnabove[3] - pabove[3]); - delta1 = (r + r + b + g + g + g + g + g)>>3; - r = abs(pncurrent[1] - pcurrent[1]); - g = abs(pncurrent[2] - pcurrent[2]); - b = abs(pncurrent[3] - pcurrent[3]); - delta2 = (r + r + b + g + g + g + g + g)>>3; - if(((delta1 + delta2) >> 1) >= threshold) { - pcurrent[1] = pabove[1]; - pcurrent[2] = pabove[2]; - pcurrent[3] = pabove[3]; - } - pabove += 4; - pnabove += 4; - pcurrent += 4; - pncurrent += 4; + r = abs(pnabove[1] - pabove[1]); + g = abs(pnabove[2] - pabove[2]); + b = abs(pnabove[3] - pabove[3]); + delta1 = (r + r + b + g + g + g + g + g)>>3; + r = abs(pncurrent[1] - pcurrent[1]); + g = abs(pncurrent[2] - pcurrent[2]); + b = abs(pncurrent[3] - pcurrent[3]); + delta2 = (r + r + b + g + g + g + g + g)>>3; + if(((delta1 + delta2) >> 1) >= threshold) { + pcurrent[1] = pabove[1]; + pcurrent[2] = pabove[2]; + pcurrent[3] = pabove[3]; + } + pabove += 4; + pnabove += 4; + pcurrent += 4; + pncurrent += 4; } } @@ -5184,28 +5180,28 @@ __attribute__((noinline)) void std_deinterlace_4field_abgr(uint8_t* col1, uint8_ pabove += row_width; pnabove += row_width; pbelow += row_width; - + } - + /* Special case for the last line */ max_ptr2 = pcurrent + row_width; while(pcurrent < max_ptr2) { - b = abs(pnabove[1] - pabove[1]); - g = abs(pnabove[2] - pabove[2]); - r = abs(pnabove[3] - pabove[3]); - delta1 = (r + r + b + g + g + g + g + g)>>3; - b = abs(pncurrent[1] - pcurrent[1]); - g = abs(pncurrent[2] - pcurrent[2]); - r = abs(pncurrent[3] - pcurrent[3]); - delta2 = (r + r + b + g + g + g + g + g)>>3; - if(((delta1 + delta2) >> 1) >= threshold) { - pcurrent[1] = pabove[1]; - pcurrent[2] = pabove[2]; - pcurrent[3] = pabove[3]; - } - pabove += 4; - pnabove += 4; - pcurrent += 4; - pncurrent += 4; + b = abs(pnabove[1] - pabove[1]); + g = abs(pnabove[2] - pabove[2]); + r = abs(pnabove[3] - pabove[3]); + delta1 = (r + r + b + g + g + g + g + g)>>3; + b = 
abs(pncurrent[1] - pcurrent[1]); + g = abs(pncurrent[2] - pcurrent[2]); + r = abs(pncurrent[3] - pcurrent[3]); + delta2 = (r + r + b + g + g + g + g + g)>>3; + if(((delta1 + delta2) >> 1) >= threshold) { + pcurrent[1] = pabove[1]; + pcurrent[2] = pabove[2]; + pcurrent[3] = pabove[3]; + } + pabove += 4; + pnabove += 4; + pcurrent += 4; + pncurrent += 4; } } diff --git a/src/zm_libvlc_camera.cpp b/src/zm_libvlc_camera.cpp index c43ffffe0..a4135d352 100644 --- a/src/zm_libvlc_camera.cpp +++ b/src/zm_libvlc_camera.cpp @@ -27,11 +27,11 @@ void* LibvlcLockBuffer(void* opaque, void** planes) { LibvlcPrivateData* data = (LibvlcPrivateData*)opaque; data->mutex.lock(); - + uint8_t* buffer = data->buffer; data->buffer = data->prevBuffer; data->prevBuffer = buffer; - + *planes = data->buffer; return NULL; } @@ -39,7 +39,7 @@ void* LibvlcLockBuffer(void* opaque, void** planes) void LibvlcUnlockBuffer(void* opaque, void* picture, void *const *planes) { LibvlcPrivateData* data = (LibvlcPrivateData*)opaque; - + bool newFrame = false; for(uint32_t i = 0; i < data->bufferSize; i++) { @@ -50,7 +50,7 @@ void LibvlcUnlockBuffer(void* opaque, void* picture, void *const *planes) } } data->mutex.unlock(); - + time_t now; time(&now); // Return frames slightly faster than 1fps (if time() supports greater than one second resolution) @@ -89,7 +89,7 @@ LibvlcCamera::LibvlcCamera( int p_id, const std::string &p_path, const std::stri } else { Panic("Unexpected colours: %d",colours); } - + if ( capture ) { Initialise(); @@ -143,9 +143,9 @@ void LibvlcCamera::Terminate() int LibvlcCamera::PrimeCapture() { Info("Priming capture from %s", mPath.c_str()); - + StringVector opVect = split(Options(), ","); - + // Set transport method as specified by method field, rtpUni is default if ( Method() == "rtpMulti" ) opVect.push_back("--rtsp-mcast"); @@ -168,11 +168,11 @@ int LibvlcCamera::PrimeCapture() mLibvlcInstance = libvlc_new (opVect.size(), (const char* const*)mOptArgV); if(mLibvlcInstance == NULL) 
Fatal("Unable to create libvlc instance due to: %s", libvlc_errmsg()); - + mLibvlcMedia = libvlc_media_new_location(mLibvlcInstance, mPath.c_str()); if(mLibvlcMedia == NULL) Fatal("Unable to open input %s due to: %s", mPath.c_str(), libvlc_errmsg()); - + mLibvlcMediaPlayer = libvlc_media_player_new_from_media(mLibvlcMedia); if(mLibvlcMediaPlayer == NULL) Fatal("Unable to create player for %s due to: %s", mPath.c_str(), libvlc_errmsg()); @@ -188,12 +188,12 @@ int LibvlcCamera::PrimeCapture() mLibvlcData.newImage.setValueImmediate(false); libvlc_media_player_play(mLibvlcMediaPlayer); - + return(0); } int LibvlcCamera::PreCapture() -{ +{ return(0); } @@ -207,12 +207,12 @@ int LibvlcCamera::Capture( Image &image ) image.Assign(width, height, colours, subpixelorder, mLibvlcData.buffer, width * height * mBpp); mLibvlcData.newImage.setValueImmediate(false); mLibvlcData.mutex.unlock(); - + return (0); } // Should not return -1 as cancels capture. Always wait for image if available. -int LibvlcCamera::CaptureAndRecord( Image &image, bool recording, char* event_directory ) +int LibvlcCamera::CaptureAndRecord(Image &image, timeval recording, char* event_directory) { while(!mLibvlcData.newImage.getValueImmediate()) mLibvlcData.newImage.getUpdatedValue(1); diff --git a/src/zm_libvlc_camera.h b/src/zm_libvlc_camera.h index d1df60f3b..4221bd0b7 100644 --- a/src/zm_libvlc_camera.h +++ b/src/zm_libvlc_camera.h @@ -70,7 +70,7 @@ public: int PrimeCapture(); int PreCapture(); int Capture( Image &image ); - int CaptureAndRecord( Image &image, bool recording, char* event_directory ); + int CaptureAndRecord( Image &image, timeval recording, char* event_directory ); int PostCapture(); }; diff --git a/src/zm_local_camera.cpp b/src/zm_local_camera.cpp index 17bac85f9..f13733b11 100644 --- a/src/zm_local_camera.cpp +++ b/src/zm_local_camera.cpp @@ -55,7 +55,7 @@ static int vidioctl( int fd, int request, void *arg ) static _AVPIXELFORMAT getFfPixFormatFromV4lPalette( int v4l_version, int 
palette ) { _AVPIXELFORMAT pixFormat = AV_PIX_FMT_NONE; - + #if ZM_HAS_V4L2 if ( v4l_version == 2 ) { @@ -114,27 +114,27 @@ static _AVPIXELFORMAT getFfPixFormatFromV4lPalette( int v4l_version, int palette case V4L2_PIX_FMT_UYVY : pixFormat = AV_PIX_FMT_UYVY422; break; - // These don't seem to have ffmpeg equivalents - // See if you can match any of the ones in the default clause below!? + // These don't seem to have ffmpeg equivalents + // See if you can match any of the ones in the default clause below!? case V4L2_PIX_FMT_RGB332 : case V4L2_PIX_FMT_RGB555X : case V4L2_PIX_FMT_RGB565X : - //case V4L2_PIX_FMT_Y16 : - //case V4L2_PIX_FMT_PAL8 : + //case V4L2_PIX_FMT_Y16 : + //case V4L2_PIX_FMT_PAL8 : case V4L2_PIX_FMT_YVU410 : case V4L2_PIX_FMT_YVU420 : case V4L2_PIX_FMT_Y41P : - //case V4L2_PIX_FMT_YUV555 : - //case V4L2_PIX_FMT_YUV565 : - //case V4L2_PIX_FMT_YUV32 : + //case V4L2_PIX_FMT_YUV555 : + //case V4L2_PIX_FMT_YUV565 : + //case V4L2_PIX_FMT_YUV32 : case V4L2_PIX_FMT_NV12 : case V4L2_PIX_FMT_NV21 : case V4L2_PIX_FMT_YYUV : case V4L2_PIX_FMT_HI240 : case V4L2_PIX_FMT_HM12 : - //case V4L2_PIX_FMT_SBGGR8 : - //case V4L2_PIX_FMT_SGBRG8 : - //case V4L2_PIX_FMT_SBGGR16 : + //case V4L2_PIX_FMT_SBGGR8 : + //case V4L2_PIX_FMT_SGBRG8 : + //case V4L2_PIX_FMT_SBGGR16 : case V4L2_PIX_FMT_DV : case V4L2_PIX_FMT_MPEG : case V4L2_PIX_FMT_WNVA : @@ -142,43 +142,43 @@ static _AVPIXELFORMAT getFfPixFormatFromV4lPalette( int v4l_version, int palette case V4L2_PIX_FMT_PWC1 : case V4L2_PIX_FMT_PWC2 : case V4L2_PIX_FMT_ET61X251 : - //case V4L2_PIX_FMT_SPCA501 : - //case V4L2_PIX_FMT_SPCA505 : - //case V4L2_PIX_FMT_SPCA508 : - //case V4L2_PIX_FMT_SPCA561 : - //case V4L2_PIX_FMT_PAC207 : - //case V4L2_PIX_FMT_PJPG : - //case V4L2_PIX_FMT_YVYU : + //case V4L2_PIX_FMT_SPCA501 : + //case V4L2_PIX_FMT_SPCA505 : + //case V4L2_PIX_FMT_SPCA508 : + //case V4L2_PIX_FMT_SPCA561 : + //case V4L2_PIX_FMT_PAC207 : + //case V4L2_PIX_FMT_PJPG : + //case V4L2_PIX_FMT_YVYU : default : - { - Fatal( 
"Can't find swscale format for palette %d", palette ); - break; - // These are all spare and may match some of the above - pixFormat = AV_PIX_FMT_YUVJ420P; - pixFormat = AV_PIX_FMT_YUVJ422P; - pixFormat = AV_PIX_FMT_UYVY422; - pixFormat = AV_PIX_FMT_UYYVYY411; - pixFormat = AV_PIX_FMT_BGR565; - pixFormat = AV_PIX_FMT_BGR555; - pixFormat = AV_PIX_FMT_BGR8; - pixFormat = AV_PIX_FMT_BGR4; - pixFormat = AV_PIX_FMT_BGR4_BYTE; - pixFormat = AV_PIX_FMT_RGB8; - pixFormat = AV_PIX_FMT_RGB4; - pixFormat = AV_PIX_FMT_RGB4_BYTE; - pixFormat = AV_PIX_FMT_NV12; - pixFormat = AV_PIX_FMT_NV21; - pixFormat = AV_PIX_FMT_RGB32_1; - pixFormat = AV_PIX_FMT_BGR32_1; - pixFormat = AV_PIX_FMT_GRAY16BE; - pixFormat = AV_PIX_FMT_GRAY16LE; - pixFormat = AV_PIX_FMT_YUV440P; - pixFormat = AV_PIX_FMT_YUVJ440P; - pixFormat = AV_PIX_FMT_YUVA420P; - //pixFormat = AV_PIX_FMT_VDPAU_H264; - //pixFormat = AV_PIX_FMT_VDPAU_MPEG1; - //pixFormat = AV_PIX_FMT_VDPAU_MPEG2; - } + { + Fatal( "Can't find swscale format for palette %d", palette ); + break; + // These are all spare and may match some of the above + pixFormat = AV_PIX_FMT_YUVJ420P; + pixFormat = AV_PIX_FMT_YUVJ422P; + pixFormat = AV_PIX_FMT_UYVY422; + pixFormat = AV_PIX_FMT_UYYVYY411; + pixFormat = AV_PIX_FMT_BGR565; + pixFormat = AV_PIX_FMT_BGR555; + pixFormat = AV_PIX_FMT_BGR8; + pixFormat = AV_PIX_FMT_BGR4; + pixFormat = AV_PIX_FMT_BGR4_BYTE; + pixFormat = AV_PIX_FMT_RGB8; + pixFormat = AV_PIX_FMT_RGB4; + pixFormat = AV_PIX_FMT_RGB4_BYTE; + pixFormat = AV_PIX_FMT_NV12; + pixFormat = AV_PIX_FMT_NV21; + pixFormat = AV_PIX_FMT_RGB32_1; + pixFormat = AV_PIX_FMT_BGR32_1; + pixFormat = AV_PIX_FMT_GRAY16BE; + pixFormat = AV_PIX_FMT_GRAY16LE; + pixFormat = AV_PIX_FMT_YUV440P; + pixFormat = AV_PIX_FMT_YUVJ440P; + pixFormat = AV_PIX_FMT_YUVA420P; + //pixFormat = AV_PIX_FMT_VDPAU_H264; + //pixFormat = AV_PIX_FMT_VDPAU_MPEG1; + //pixFormat = AV_PIX_FMT_VDPAU_MPEG2; + } } } #endif // ZM_HAS_V4L2 @@ -188,17 +188,17 @@ static _AVPIXELFORMAT 
getFfPixFormatFromV4lPalette( int v4l_version, int palette switch( palette ) { case VIDEO_PALETTE_RGB32 : - if(BigEndian) - pixFormat = AV_PIX_FMT_ARGB; - else - pixFormat = AV_PIX_FMT_BGRA; - break; + if(BigEndian) + pixFormat = AV_PIX_FMT_ARGB; + else + pixFormat = AV_PIX_FMT_BGRA; + break; case VIDEO_PALETTE_RGB24 : - if(BigEndian) - pixFormat = AV_PIX_FMT_RGB24; - else - pixFormat = AV_PIX_FMT_BGR24; - break; + if(BigEndian) + pixFormat = AV_PIX_FMT_RGB24; + else + pixFormat = AV_PIX_FMT_BGR24; + break; case VIDEO_PALETTE_GREY : pixFormat = AV_PIX_FMT_GRAY8; break; @@ -219,36 +219,36 @@ static _AVPIXELFORMAT getFfPixFormatFromV4lPalette( int v4l_version, int palette pixFormat = AV_PIX_FMT_YUV420P; break; default : - { - Fatal( "Can't find swscale format for palette %d", palette ); - break; - // These are all spare and may match some of the above - pixFormat = AV_PIX_FMT_YUVJ420P; - pixFormat = AV_PIX_FMT_YUVJ422P; - pixFormat = AV_PIX_FMT_YUVJ444P; - pixFormat = AV_PIX_FMT_UYVY422; - pixFormat = AV_PIX_FMT_UYYVYY411; - pixFormat = AV_PIX_FMT_BGR565; - pixFormat = AV_PIX_FMT_BGR555; - pixFormat = AV_PIX_FMT_BGR8; - pixFormat = AV_PIX_FMT_BGR4; - pixFormat = AV_PIX_FMT_BGR4_BYTE; - pixFormat = AV_PIX_FMT_RGB8; - pixFormat = AV_PIX_FMT_RGB4; - pixFormat = AV_PIX_FMT_RGB4_BYTE; - pixFormat = AV_PIX_FMT_NV12; - pixFormat = AV_PIX_FMT_NV21; - pixFormat = AV_PIX_FMT_RGB32_1; - pixFormat = AV_PIX_FMT_BGR32_1; - pixFormat = AV_PIX_FMT_GRAY16BE; - pixFormat = AV_PIX_FMT_GRAY16LE; - pixFormat = AV_PIX_FMT_YUV440P; - pixFormat = AV_PIX_FMT_YUVJ440P; - pixFormat = AV_PIX_FMT_YUVA420P; - //pixFormat = AV_PIX_FMT_VDPAU_H264; - //pixFormat = AV_PIX_FMT_VDPAU_MPEG1; - //pixFormat = AV_PIX_FMT_VDPAU_MPEG2; - } + { + Fatal( "Can't find swscale format for palette %d", palette ); + break; + // These are all spare and may match some of the above + pixFormat = AV_PIX_FMT_YUVJ420P; + pixFormat = AV_PIX_FMT_YUVJ422P; + pixFormat = AV_PIX_FMT_YUVJ444P; + pixFormat = AV_PIX_FMT_UYVY422; 
+ pixFormat = AV_PIX_FMT_UYYVYY411; + pixFormat = AV_PIX_FMT_BGR565; + pixFormat = AV_PIX_FMT_BGR555; + pixFormat = AV_PIX_FMT_BGR8; + pixFormat = AV_PIX_FMT_BGR4; + pixFormat = AV_PIX_FMT_BGR4_BYTE; + pixFormat = AV_PIX_FMT_RGB8; + pixFormat = AV_PIX_FMT_RGB4; + pixFormat = AV_PIX_FMT_RGB4_BYTE; + pixFormat = AV_PIX_FMT_NV12; + pixFormat = AV_PIX_FMT_NV21; + pixFormat = AV_PIX_FMT_RGB32_1; + pixFormat = AV_PIX_FMT_BGR32_1; + pixFormat = AV_PIX_FMT_GRAY16BE; + pixFormat = AV_PIX_FMT_GRAY16LE; + pixFormat = AV_PIX_FMT_YUV440P; + pixFormat = AV_PIX_FMT_YUVJ440P; + pixFormat = AV_PIX_FMT_YUVA420P; + //pixFormat = AV_PIX_FMT_VDPAU_H264; + //pixFormat = AV_PIX_FMT_VDPAU_MPEG1; + //pixFormat = AV_PIX_FMT_VDPAU_MPEG2; + } } } #endif // ZM_HAS_V4L1 @@ -319,7 +319,7 @@ LocalCamera::LocalCamera( v4l_version = (p_method=="v4l2"?2:1); v4l_multi_buffer = p_v4l_multi_buffer; v4l_captures_per_frame = p_v4l_captures_per_frame; - + if ( capture ) { if ( device_prime ) @@ -340,9 +340,9 @@ LocalCamera::LocalCamera( // We are the second, or subsequent, input using this channel channel_prime = false; } - + } - + /* The V4L1 API doesn't care about endianness, we need to check the endianness of the machine */ uint32_t checkval = 0xAABBCCDD; if(*(unsigned char*)&checkval == 0xDD) { @@ -355,7 +355,7 @@ LocalCamera::LocalCamera( Error("Unable to detect the processor's endianness. 
Assuming little-endian."); BigEndian = 0; } - + #if ZM_HAS_V4L2 if( v4l_version == 2 && palette == 0 ) { /* Use automatic format selection */ @@ -371,22 +371,22 @@ LocalCamera::LocalCamera( } } #endif - + if( capture ) { if ( last_camera ) { if ( (p_method == "v4l2" && v4l_version != 2) || (p_method == "v4l1" && v4l_version != 1) ) Fatal( "Different Video For Linux version used for monitors sharing same device" ); - + if ( standard != last_camera->standard ) Warning( "Different video standards defined for monitors sharing same device, results may be unpredictable or completely wrong" ); - + if ( palette != last_camera->palette ) Warning( "Different video palettes defined for monitors sharing same device, results may be unpredictable or completely wrong" ); - + if ( width != last_camera->width || height != last_camera->height ) Warning( "Different capture sizes defined for monitors sharing same device, results may be unpredictable or completely wrong" ); } - + #if HAVE_LIBSWSCALE /* Get ffmpeg pixel format based on capture palette and endianness */ capturePixFormat = getFfPixFormatFromV4lPalette( v4l_version, palette ); @@ -398,39 +398,39 @@ LocalCamera::LocalCamera( #if ZM_HAS_V4L2 if ( v4l_version == 2 ) { /* Try to find a match for the selected palette and target colourspace */ - + /* RGB32 palette and 32bit target colourspace */ if(palette == V4L2_PIX_FMT_RGB32 && colours == ZM_COLOUR_RGB32) { conversion_type = 0; subpixelorder = ZM_SUBPIX_ORDER_ARGB; - - /* BGR32 palette and 32bit target colourspace */ + + /* BGR32 palette and 32bit target colourspace */ } else if(palette == V4L2_PIX_FMT_BGR32 && colours == ZM_COLOUR_RGB32) { conversion_type = 0; subpixelorder = ZM_SUBPIX_ORDER_BGRA; - - /* RGB24 palette and 24bit target colourspace */ + + /* RGB24 palette and 24bit target colourspace */ } else if(palette == V4L2_PIX_FMT_RGB24 && colours == ZM_COLOUR_RGB24) { conversion_type = 0; subpixelorder = ZM_SUBPIX_ORDER_RGB; - - /* BGR24 palette and 24bit target 
colourspace */ + + /* BGR24 palette and 24bit target colourspace */ } else if(palette == V4L2_PIX_FMT_BGR24 && colours == ZM_COLOUR_RGB24) { conversion_type = 0; subpixelorder = ZM_SUBPIX_ORDER_BGR; - - /* Grayscale palette and grayscale target colourspace */ + + /* Grayscale palette and grayscale target colourspace */ } else if(palette == V4L2_PIX_FMT_GREY && colours == ZM_COLOUR_GRAY8) { conversion_type = 0; subpixelorder = ZM_SUBPIX_ORDER_NONE; - /* Unable to find a solution for the selected palette and target colourspace. Conversion required. Notify the user of performance penalty */ + /* Unable to find a solution for the selected palette and target colourspace. Conversion required. Notify the user of performance penalty */ } else { if( capture ) #if HAVE_LIBSWSCALE Info("No direct match for the selected palette (0x%02hhx%02hhx%02hhx%02hhx) and target colorspace (%02u). Format conversion is required, performance penalty expected", (capturePixFormat>>24)&0xff,((capturePixFormat>>16)&0xff),((capturePixFormat>>8)&0xff),((capturePixFormat)&0xff), colours); #else - Info("No direct match for the selected palette and target colorspace. Format conversion is required, performance penalty expected"); + Info("No direct match for the selected palette and target colorspace. 
Format conversion is required, performance penalty expected"); #endif #if HAVE_LIBSWSCALE /* Try using swscale for the conversion */ @@ -470,13 +470,13 @@ LocalCamera::LocalCamera( if(colours == ZM_COLOUR_GRAY8 && palette == V4L2_PIX_FMT_YUYV) { conversion_type = 2; } - + /* JPEG */ if(palette == V4L2_PIX_FMT_JPEG || palette == V4L2_PIX_FMT_MJPEG) { Debug(2,"Using JPEG image decoding"); conversion_type = 3; } - + if(conversion_type == 2) { Debug(2,"Using ZM for image conversion"); if(palette == V4L2_PIX_FMT_RGB32 && colours == ZM_COLOUR_GRAY8) { @@ -521,11 +521,11 @@ LocalCamera::LocalCamera( } #endif // ZM_HAS_V4L2 - /* V4L1 format matching */ + /* V4L1 format matching */ #if ZM_HAS_V4L1 if ( v4l_version == 1) { /* Try to find a match for the selected palette and target colourspace */ - + /* RGB32 palette and 32bit target colourspace */ if(palette == VIDEO_PALETTE_RGB32 && colours == ZM_COLOUR_RGB32) { conversion_type = 0; @@ -534,8 +534,8 @@ LocalCamera::LocalCamera( } else { subpixelorder = ZM_SUBPIX_ORDER_BGRA; } - - /* RGB24 palette and 24bit target colourspace */ + + /* RGB24 palette and 24bit target colourspace */ } else if(palette == VIDEO_PALETTE_RGB24 && colours == ZM_COLOUR_RGB24) { conversion_type = 0; if(BigEndian) { @@ -543,12 +543,12 @@ LocalCamera::LocalCamera( } else { subpixelorder = ZM_SUBPIX_ORDER_BGR; } - - /* Grayscale palette and grayscale target colourspace */ + + /* Grayscale palette and grayscale target colourspace */ } else if(palette == VIDEO_PALETTE_GREY && colours == ZM_COLOUR_GRAY8) { conversion_type = 0; subpixelorder = ZM_SUBPIX_ORDER_NONE; - /* Unable to find a solution for the selected palette and target colourspace. Conversion required. Notify the user of performance penalty */ + /* Unable to find a solution for the selected palette and target colourspace. Conversion required. Notify the user of performance penalty */ } else { if( capture ) Info("No direct match for the selected palette and target colorspace. 
Format conversion is required, performance penalty expected"); @@ -586,7 +586,7 @@ LocalCamera::LocalCamera( if(colours == ZM_COLOUR_GRAY8 && (palette == VIDEO_PALETTE_YUYV || palette == VIDEO_PALETTE_YUV422)) { conversion_type = 2; } - + if(conversion_type == 2) { Debug(2,"Using ZM for image conversion"); if(palette == VIDEO_PALETTE_RGB32 && colours == ZM_COLOUR_GRAY8) { @@ -631,7 +631,7 @@ LocalCamera::LocalCamera( } } } -#endif // ZM_HAS_V4L1 +#endif // ZM_HAS_V4L1 last_camera = this; Debug(3,"Selected subpixelorder: %u",subpixelorder); @@ -655,13 +655,13 @@ LocalCamera::LocalCamera( if( (unsigned int)pSize != imagesize) { Fatal("Image size mismatch. Required: %d Available: %u",pSize,imagesize); } - + imgConversionContext = sws_getContext(width, height, capturePixFormat, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL ); - + if ( !imgConversionContext ) { Fatal( "Unable to initialise image scaling context" ); - } - + } + } #endif } @@ -670,18 +670,14 @@ LocalCamera::~LocalCamera() { if ( device_prime && capture ) Terminate(); - + #if HAVE_LIBSWSCALE /* Clean up swscale stuff */ if(capture && conversion_type == 1) { sws_freeContext(imgConversionContext); imgConversionContext = NULL; - -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) + av_frame_free( &tmpPicture ); -#else - av_freep( &tmpPicture ); -#endif } #endif } @@ -747,23 +743,23 @@ void LocalCamera::Initialise() v4l2_data.fmt.fmt.pix.height = height; v4l2_data.fmt.fmt.pix.pixelformat = palette; - if ( (extras & 0xff) != 0 ) - { - v4l2_data.fmt.fmt.pix.field = (v4l2_field)(extras & 0xff); - - if ( vidioctl( vid_fd, VIDIOC_S_FMT, &v4l2_data.fmt ) < 0 ) + if ( (extras & 0xff) != 0 ) { - Warning( "Failed to set V4L2 field to %d, falling back to auto", (extras & 0xff) ); - v4l2_data.fmt.fmt.pix.field = V4L2_FIELD_ANY; + v4l2_data.fmt.fmt.pix.field = (v4l2_field)(extras & 0xff); + + if ( vidioctl( vid_fd, VIDIOC_S_FMT, &v4l2_data.fmt ) < 0 ) + { + Warning( "Failed to set V4L2 field to %d, falling 
back to auto", (extras & 0xff) ); + v4l2_data.fmt.fmt.pix.field = V4L2_FIELD_ANY; + if ( vidioctl( vid_fd, VIDIOC_S_FMT, &v4l2_data.fmt ) < 0 ) { + Fatal( "Failed to set video format: %s", strerror(errno) ); + } + } + } else { if ( vidioctl( vid_fd, VIDIOC_S_FMT, &v4l2_data.fmt ) < 0 ) { Fatal( "Failed to set video format: %s", strerror(errno) ); } } - } else { - if ( vidioctl( vid_fd, VIDIOC_S_FMT, &v4l2_data.fmt ) < 0 ) { - Fatal( "Failed to set video format: %s", strerror(errno) ); - } - } /* Note VIDIOC_S_FMT may change width and height. */ Debug( 4, " v4l2_data.fmt.type = %08x", v4l2_data.fmt.type ); @@ -785,39 +781,39 @@ void LocalCamera::Initialise() if (v4l2_data.fmt.fmt.pix.sizeimage < min) v4l2_data.fmt.fmt.pix.sizeimage = min; - v4l2_jpegcompression jpeg_comp; - if(palette == V4L2_PIX_FMT_JPEG || palette == V4L2_PIX_FMT_MJPEG) { - if( vidioctl( vid_fd, VIDIOC_G_JPEGCOMP, &jpeg_comp ) < 0 ) { - if(errno == EINVAL) { - Debug(2, "JPEG compression options are not available"); - } else { - Warning("Failed to get JPEG compression options: %s", strerror(errno) ); - } - } else { - /* Set flags and quality. 
MJPEG should not have the huffman tables defined */ - if(palette == V4L2_PIX_FMT_MJPEG) { - jpeg_comp.jpeg_markers |= V4L2_JPEG_MARKER_DQT | V4L2_JPEG_MARKER_DRI; - } else { - jpeg_comp.jpeg_markers |= V4L2_JPEG_MARKER_DQT | V4L2_JPEG_MARKER_DRI | V4L2_JPEG_MARKER_DHT; - } - jpeg_comp.quality = 85; - - /* Update the JPEG options */ - if( vidioctl( vid_fd, VIDIOC_S_JPEGCOMP, &jpeg_comp ) < 0 ) { - Warning("Failed to set JPEG compression options: %s", strerror(errno) ); - } else { - if(vidioctl( vid_fd, VIDIOC_G_JPEGCOMP, &jpeg_comp ) < 0) { - Debug(3,"Failed to get updated JPEG compression options: %s", strerror(errno) ); + v4l2_jpegcompression jpeg_comp; + if(palette == V4L2_PIX_FMT_JPEG || palette == V4L2_PIX_FMT_MJPEG) { + if( vidioctl( vid_fd, VIDIOC_G_JPEGCOMP, &jpeg_comp ) < 0 ) { + if(errno == EINVAL) { + Debug(2, "JPEG compression options are not available"); } else { - Debug(4, "JPEG quality: %d",jpeg_comp.quality); - Debug(4, "JPEG markers: 0x%x",jpeg_comp.jpeg_markers); + Warning("Failed to get JPEG compression options: %s", strerror(errno) ); + } + } else { + /* Set flags and quality. 
MJPEG should not have the huffman tables defined */ + if(palette == V4L2_PIX_FMT_MJPEG) { + jpeg_comp.jpeg_markers |= V4L2_JPEG_MARKER_DQT | V4L2_JPEG_MARKER_DRI; + } else { + jpeg_comp.jpeg_markers |= V4L2_JPEG_MARKER_DQT | V4L2_JPEG_MARKER_DRI | V4L2_JPEG_MARKER_DHT; + } + jpeg_comp.quality = 85; + + /* Update the JPEG options */ + if( vidioctl( vid_fd, VIDIOC_S_JPEGCOMP, &jpeg_comp ) < 0 ) { + Warning("Failed to set JPEG compression options: %s", strerror(errno) ); + } else { + if(vidioctl( vid_fd, VIDIOC_G_JPEGCOMP, &jpeg_comp ) < 0) { + Debug(3,"Failed to get updated JPEG compression options: %s", strerror(errno) ); + } else { + Debug(4, "JPEG quality: %d",jpeg_comp.quality); + Debug(4, "JPEG markers: %#x",jpeg_comp.jpeg_markers); + } } } } - } Debug( 3, "Setting up request buffers" ); - + memset( &v4l2_data.reqbufs, 0, sizeof(v4l2_data.reqbufs) ); if ( channel_count > 1 ) { Debug( 3, "Channel count is %d", channel_count ); @@ -878,23 +874,23 @@ void LocalCamera::Initialise() #if HAVE_LIBSWSCALE #if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) - capturePictures[i] = av_frame_alloc(); + capturePictures[i] = av_frame_alloc(); #else - capturePictures[i] = avcodec_alloc_frame(); + capturePictures[i] = avcodec_alloc_frame(); #endif - if ( !capturePictures[i] ) - Fatal( "Could not allocate picture" ); + if ( !capturePictures[i] ) + Fatal( "Could not allocate picture" ); #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0) - av_image_fill_arrays(capturePictures[i]->data, - capturePictures[i]->linesize, - (uint8_t*)v4l2_data.buffers[i].start,capturePixFormat, - v4l2_data.fmt.fmt.pix.width, - v4l2_data.fmt.fmt.pix.height, 1); + av_image_fill_arrays(capturePictures[i]->data, + capturePictures[i]->linesize, + (uint8_t*)v4l2_data.buffers[i].start,capturePixFormat, + v4l2_data.fmt.fmt.pix.width, + v4l2_data.fmt.fmt.pix.height, 1); #else - avpicture_fill( (AVPicture *)capturePictures[i], - (uint8_t*)v4l2_data.buffers[i].start, capturePixFormat, - v4l2_data.fmt.fmt.pix.width, - 
v4l2_data.fmt.fmt.pix.height ); + avpicture_fill( (AVPicture *)capturePictures[i], + (uint8_t*)v4l2_data.buffers[i].start, capturePixFormat, + v4l2_data.fmt.fmt.pix.width, + v4l2_data.fmt.fmt.pix.height ); #endif #endif // HAVE_LIBSWSCALE } @@ -953,30 +949,30 @@ void LocalCamera::Initialise() switch (vid_pic.palette = palette) { case VIDEO_PALETTE_RGB32 : - { - vid_pic.depth = 32; - break; - } + { + vid_pic.depth = 32; + break; + } case VIDEO_PALETTE_RGB24 : - { - vid_pic.depth = 24; - break; - } + { + vid_pic.depth = 24; + break; + } case VIDEO_PALETTE_GREY : - { - vid_pic.depth = 8; - break; - } + { + vid_pic.depth = 8; + break; + } case VIDEO_PALETTE_RGB565 : case VIDEO_PALETTE_YUYV : case VIDEO_PALETTE_YUV422 : case VIDEO_PALETTE_YUV420P : case VIDEO_PALETTE_YUV422P : default: - { - vid_pic.depth = 16; - break; - } + { + vid_pic.depth = 16; + break; + } } if ( brightness >= 0 ) vid_pic.brightness = brightness; @@ -1004,7 +1000,7 @@ void LocalCamera::Initialise() Debug( 4, "Old Y:%d", vid_win.y ); Debug( 4, "Old W:%d", vid_win.width ); Debug( 4, "Old H:%d", vid_win.height ); - + vid_win.x = 0; vid_win.y = 0; vid_win.width = width; @@ -1055,13 +1051,13 @@ void LocalCamera::Initialise() Fatal( "Could not allocate picture" ); #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0) av_image_fill_arrays(capturePictures[i]->data, - capturePictures[i]->linesize, - (unsigned char *)v4l1_data.bufptr+v4l1_data.frames.offsets[i], - capturePixFormat, width, height, 1); + capturePictures[i]->linesize, + (unsigned char *)v4l1_data.bufptr+v4l1_data.frames.offsets[i], + capturePixFormat, width, height, 1); #else avpicture_fill( (AVPicture *)capturePictures[i], - (unsigned char *)v4l1_data.bufptr+v4l1_data.frames.offsets[i], - capturePixFormat, width, height ); + (unsigned char *)v4l1_data.bufptr+v4l1_data.frames.offsets[i], + capturePixFormat, width, height ); #endif } #endif // HAVE_LIBSWSCALE @@ -1100,7 +1096,7 @@ void LocalCamera::Initialise() Debug( 4, "New Y:%d", vid_win.y ); Debug( 
4, "New W:%d", vid_win.width ); Debug( 4, "New H:%d", vid_win.height ); - + if ( ioctl( vid_fd, VIDIOCGPICT, &vid_pic) < 0 ) Fatal( "Failed to get window data: %s", strerror(errno) ); @@ -1121,7 +1117,7 @@ void LocalCamera::Terminate() { Debug( 3, "Terminating video stream" ); //enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - // enum v4l2_buf_type type = v4l2_data.fmt.type; + // enum v4l2_buf_type type = v4l2_data.fmt.type; enum v4l2_buf_type type = (v4l2_buf_type)v4l2_data.fmt.type; if ( vidioctl( vid_fd, VIDIOC_STREAMOFF, &type ) < 0 ) Error( "Failed to stop capture stream: %s", strerror(errno) ); @@ -1131,44 +1127,44 @@ void LocalCamera::Terminate() #if HAVE_LIBSWSCALE /* Free capture pictures */ #if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) - av_frame_free( &capturePictures[i] ); + av_frame_free( &capturePictures[i] ); #else - av_freep( &capturePictures[i] ); + av_freep( &capturePictures[i] ); #endif #endif if ( munmap( v4l2_data.buffers[i].start, v4l2_data.buffers[i].length ) < 0 ) Error( "Failed to munmap buffer %d: %s", i, strerror(errno) ); - } - + } + } else #endif // ZM_HAS_V4L2 #if ZM_HAS_V4L1 - if ( v4l_version == 1 ) - { + if ( v4l_version == 1 ) + { #if HAVE_LIBSWSCALE - for(int i=0; i < v4l1_data.frames.frames; i++) { - /* Free capture pictures */ + for(int i=0; i < v4l1_data.frames.frames; i++) { + /* Free capture pictures */ #if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) - av_frame_free( &capturePictures[i] ); + av_frame_free( &capturePictures[i] ); #else - av_freep( &capturePictures[i] ); + av_freep( &capturePictures[i] ); #endif - } + } #endif - - Debug( 3, "Unmapping video buffers" ); - if ( munmap((char*)v4l1_data.bufptr, v4l1_data.frames.size) < 0 ) - Error( "Failed to munmap buffers: %s", strerror(errno) ); - delete[] v4l1_data.buffers; - } + Debug( 3, "Unmapping video buffers" ); + if ( munmap((char*)v4l1_data.bufptr, v4l1_data.frames.size) < 0 ) + Error( "Failed to munmap buffers: %s", strerror(errno) ); + + delete[] 
v4l1_data.buffers; + } #endif // ZM_HAS_V4L1 close( vid_fd ); - + } uint32_t LocalCamera::AutoSelectFormat(int p_colours) { @@ -1181,13 +1177,13 @@ uint32_t LocalCamera::AutoSelectFormat(int p_colours) { unsigned int nIndex = 0; //int nRet = 0; // compiler say it isn't used int enum_fd; - + /* Open the device */ if ((enum_fd = open( device.c_str(), O_RDWR, 0 )) < 0) { Error( "Automatic format selection failed to open video device %s: %s", device.c_str(), strerror(errno) ); return selected_palette; } - + /* Enumerate available formats */ memset(&fmtinfo, 0, sizeof(fmtinfo)); fmtinfo.index = nIndex; @@ -1206,7 +1202,7 @@ uint32_t LocalCamera::AutoSelectFormat(int p_colours) { fmtinfo.index = ++nIndex; fmtinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; } - + /* Select format */ int nIndexUsed = -1; unsigned int n_preferedformats = 0; @@ -1238,17 +1234,17 @@ uint32_t LocalCamera::AutoSelectFormat(int p_colours) { } } } - + /* Have we found a match? */ if(nIndexUsed >= 0) { /* Found a match */ selected_palette = fmt_fcc[nIndexUsed]; strcpy(palette_desc,fmt_desc[nIndexUsed]); } - + /* Close the device */ close(enum_fd); - + #endif /* ZM_HAS_V4L2 */ return selected_palette; } @@ -1312,25 +1308,25 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers sprintf( output+strlen(output), " Bus: %s\n", vid_cap.bus_info ); sprintf( output+strlen(output), " Version: %u.%u.%u\n", (vid_cap.version>>16)&0xff, (vid_cap.version>>8)&0xff, vid_cap.version&0xff ); sprintf( output+strlen(output), " Type: 0x%x\n%s%s%s%s%s%s%s%s%s%s%s%s%s%s", vid_cap.capabilities, - capString( vid_cap.capabilities&V4L2_CAP_VIDEO_CAPTURE, " ", "Supports", "Does not support", "video capture (X)" ), - capString( vid_cap.capabilities&V4L2_CAP_VIDEO_OUTPUT, " ", "Supports", "Does not support", "video output" ), - capString( vid_cap.capabilities&V4L2_CAP_VIDEO_OVERLAY, " ", "Supports", "Does not support", "frame buffer overlay" ), - capString( vid_cap.capabilities&V4L2_CAP_VBI_CAPTURE, " ", 
"Supports", "Does not support", "VBI capture" ), - capString( vid_cap.capabilities&V4L2_CAP_VBI_OUTPUT, " ", "Supports", "Does not support", "VBI output" ), - capString( vid_cap.capabilities&V4L2_CAP_SLICED_VBI_CAPTURE, " ", "Supports", "Does not support", "sliced VBI capture" ), - capString( vid_cap.capabilities&V4L2_CAP_SLICED_VBI_OUTPUT, " ", "Supports", "Does not support", "sliced VBI output" ), + capString( vid_cap.capabilities&V4L2_CAP_VIDEO_CAPTURE, " ", "Supports", "Does not support", "video capture (X)" ), + capString( vid_cap.capabilities&V4L2_CAP_VIDEO_OUTPUT, " ", "Supports", "Does not support", "video output" ), + capString( vid_cap.capabilities&V4L2_CAP_VIDEO_OVERLAY, " ", "Supports", "Does not support", "frame buffer overlay" ), + capString( vid_cap.capabilities&V4L2_CAP_VBI_CAPTURE, " ", "Supports", "Does not support", "VBI capture" ), + capString( vid_cap.capabilities&V4L2_CAP_VBI_OUTPUT, " ", "Supports", "Does not support", "VBI output" ), + capString( vid_cap.capabilities&V4L2_CAP_SLICED_VBI_CAPTURE, " ", "Supports", "Does not support", "sliced VBI capture" ), + capString( vid_cap.capabilities&V4L2_CAP_SLICED_VBI_OUTPUT, " ", "Supports", "Does not support", "sliced VBI output" ), #ifdef V4L2_CAP_VIDEO_OUTPUT_OVERLAY - capString( vid_cap.capabilities&V4L2_CAP_VIDEO_OUTPUT_OVERLAY, " ", "Supports", "Does not support", "video output overlay" ), + capString( vid_cap.capabilities&V4L2_CAP_VIDEO_OUTPUT_OVERLAY, " ", "Supports", "Does not support", "video output overlay" ), #else // V4L2_CAP_VIDEO_OUTPUT_OVERLAY - "", + "", #endif // V4L2_CAP_VIDEO_OUTPUT_OVERLAY - capString( vid_cap.capabilities&V4L2_CAP_TUNER, " ", "Has", "Does not have", "tuner" ), - capString( vid_cap.capabilities&V4L2_CAP_AUDIO, " ", "Has", "Does not have", "audio in and/or out" ), - capString( vid_cap.capabilities&V4L2_CAP_RADIO, " ", "Has", "Does not have", "radio" ), - capString( vid_cap.capabilities&V4L2_CAP_READWRITE, " ", "Supports", "Does not support", "read/write i/o (X)" 
), - capString( vid_cap.capabilities&V4L2_CAP_ASYNCIO, " ", "Supports", "Does not support", "async i/o" ), - capString( vid_cap.capabilities&V4L2_CAP_STREAMING, " ", "Supports", "Does not support", "streaming i/o (X)" ) - ); + capString( vid_cap.capabilities&V4L2_CAP_TUNER, " ", "Has", "Does not have", "tuner" ), + capString( vid_cap.capabilities&V4L2_CAP_AUDIO, " ", "Has", "Does not have", "audio in and/or out" ), + capString( vid_cap.capabilities&V4L2_CAP_RADIO, " ", "Has", "Does not have", "radio" ), + capString( vid_cap.capabilities&V4L2_CAP_READWRITE, " ", "Supports", "Does not support", "read/write i/o (X)" ), + capString( vid_cap.capabilities&V4L2_CAP_ASYNCIO, " ", "Supports", "Does not support", "async i/o" ), + capString( vid_cap.capabilities&V4L2_CAP_STREAMING, " ", "Supports", "Does not support", "streaming i/o (X)" ) + ); } else { @@ -1342,7 +1338,7 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers } if ( verbose ) - sprintf( output+strlen(output), " Standards:\n" ); + sprintf( output+strlen(output), " Standards:\n" ); else sprintf( output+strlen(output), "S:" ); struct v4l2_standard standard; @@ -1371,14 +1367,14 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers } } if ( verbose ) - sprintf( output+strlen(output), " %s\n", standard.name ); + sprintf( output+strlen(output), " %s\n", standard.name ); else sprintf( output+strlen(output), "%s/", standard.name ); } while ( standardIndex++ >= 0 ); if ( !verbose && output[strlen(output)-1] == '/') output[strlen(output)-1] = '|'; - + if ( verbose ) sprintf( output+strlen(output), " Formats:\n" ); else @@ -1419,55 +1415,55 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers if ( !verbose ) output[strlen(output)-1] = '|'; - if(verbose) - sprintf( output+strlen(output), "Crop Capabilities\n" ); - - struct v4l2_cropcap cropcap; - memset( &cropcap, 0, sizeof(cropcap) ); - cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - 
if ( vidioctl( vid_fd, VIDIOC_CROPCAP, &cropcap ) < 0 ) - { - if(errno != EINVAL) { - /* Failed querying crop capability, write error to the log and continue as if crop is not supported */ - Error( "Failed to query crop capabilities: %s", strerror(errno) ); - } - - if(verbose) { - sprintf( output+strlen(output), " Cropping is not supported\n"); - } else { - /* Send fake crop bounds to not confuse things parsing this, such as monitor probe */ - sprintf( output+strlen(output), "B:%dx%d|",0,0); - } - } else { - struct v4l2_crop crop; - memset( &crop, 0, sizeof(crop) ); - crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - - if ( vidioctl( vid_fd, VIDIOC_G_CROP, &crop ) < 0 ) - { - if ( errno != EINVAL ) + if(verbose) + sprintf( output+strlen(output), "Crop Capabilities\n" ); + + struct v4l2_cropcap cropcap; + memset( &cropcap, 0, sizeof(cropcap) ); + cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if ( vidioctl( vid_fd, VIDIOC_CROPCAP, &cropcap ) < 0 ) { - /* Failed querying crop sizes, write error to the log and continue as if crop is not supported */ - Error( "Failed to query crop: %s", strerror(errno) ); - } - - if ( verbose ) { - sprintf( output+strlen(output), " Cropping is not supported\n"); + if(errno != EINVAL) { + /* Failed querying crop capability, write error to the log and continue as if crop is not supported */ + Error( "Failed to query crop capabilities: %s", strerror(errno) ); + } + + if(verbose) { + sprintf( output+strlen(output), " Cropping is not supported\n"); + } else { + /* Send fake crop bounds to not confuse things parsing this, such as monitor probe */ + sprintf( output+strlen(output), "B:%dx%d|",0,0); + } } else { - /* Send fake crop bounds to not confuse things parsing this, such as monitor probe */ - sprintf( output+strlen(output), "B:%dx%d|",0,0); - } - } else { - /* Cropping supported */ - if ( verbose ) { - sprintf( output+strlen(output), " Bounds: %d x %d\n", cropcap.bounds.width, cropcap.bounds.height ); - sprintf( output+strlen(output), " 
Default: %d x %d\n", cropcap.defrect.width, cropcap.defrect.height ); - sprintf( output+strlen(output), " Current: %d x %d\n", crop.c.width, crop.c.height ); - } else { - sprintf( output+strlen(output), "B:%dx%d|", cropcap.bounds.width, cropcap.bounds.height ); - } - } - } /* Crop code */ + struct v4l2_crop crop; + memset( &crop, 0, sizeof(crop) ); + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if ( vidioctl( vid_fd, VIDIOC_G_CROP, &crop ) < 0 ) + { + if ( errno != EINVAL ) + { + /* Failed querying crop sizes, write error to the log and continue as if crop is not supported */ + Error( "Failed to query crop: %s", strerror(errno) ); + } + + if ( verbose ) { + sprintf( output+strlen(output), " Cropping is not supported\n"); + } else { + /* Send fake crop bounds to not confuse things parsing this, such as monitor probe */ + sprintf( output+strlen(output), "B:%dx%d|",0,0); + } + } else { + /* Cropping supported */ + if ( verbose ) { + sprintf( output+strlen(output), " Bounds: %d x %d\n", cropcap.bounds.width, cropcap.bounds.height ); + sprintf( output+strlen(output), " Default: %d x %d\n", cropcap.defrect.width, cropcap.defrect.height ); + sprintf( output+strlen(output), " Current: %d x %d\n", crop.c.width, crop.c.height ); + } else { + sprintf( output+strlen(output), "B:%dx%d|", cropcap.bounds.width, cropcap.bounds.height ); + } + } + } /* Crop code */ struct v4l2_input input; int inputIndex = 0; @@ -1531,16 +1527,16 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers sprintf( output, "Error, failed to switch to input %d: %s\n", input.index, strerror(errno) ); else sprintf( output, "error%d\n", errno ); - return( false ); + return( false ); } if ( verbose ) { sprintf( output+strlen(output), " Input %d\n", input.index ); - sprintf( output+strlen(output), " Name: %s\n", input.name ); - sprintf( output+strlen(output), " Type: %s\n", input.type==V4L2_INPUT_TYPE_TUNER?"Tuner":(input.type==V4L2_INPUT_TYPE_CAMERA?"Camera":"Unknown") ); - 
sprintf( output+strlen(output), " Audioset: %08x\n", input.audioset ); - sprintf( output+strlen(output), " Standards: 0x%llx\n", input.std ); + sprintf( output+strlen(output), " Name: %s\n", input.name ); + sprintf( output+strlen(output), " Type: %s\n", input.type==V4L2_INPUT_TYPE_TUNER?"Tuner":(input.type==V4L2_INPUT_TYPE_CAMERA?"Camera":"Unknown") ); + sprintf( output+strlen(output), " Audioset: %08x\n", input.audioset ); + sprintf( output+strlen(output), " Standards: 0x%llx\n", input.std ); } else { @@ -1551,10 +1547,10 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers if ( verbose ) { - sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_POWER, "Power ", "off", "on", " (X)" ) ); - sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_SIGNAL, "Signal ", "not detected", "detected", " (X)" ) ); - sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_COLOR, "Colour Signal ", "not detected", "detected", "" ) ); - sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_H_LOCK, "Horizontal Lock ", "not detected", "detected", "" ) ); + sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_POWER, "Power ", "off", "on", " (X)" ) ); + sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_SIGNAL, "Signal ", "not detected", "detected", " (X)" ) ); + sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_COLOR, "Colour Signal ", "not detected", "detected", "" ) ); + sprintf( output+strlen(output), " %s", capString( input.status&V4L2_IN_ST_NO_H_LOCK, "Horizontal Lock ", "not detected", "detected", "" ) ); } else { @@ -1588,21 +1584,21 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers sprintf( output+strlen(output), "Video Capabilities\n" ); sprintf( output+strlen(output), " Name: %s\n", vid_cap.name ); sprintf( output+strlen(output), " Type: 
%d\n%s%s%s%s%s%s%s%s%s%s%s%s%s%s", vid_cap.type, - vid_cap.type&VID_TYPE_CAPTURE?" Can capture\n":"", - vid_cap.type&VID_TYPE_TUNER?" Can tune\n":"", - vid_cap.type&VID_TYPE_TELETEXT?" Does teletext\n":"", - vid_cap.type&VID_TYPE_OVERLAY?" Overlay onto frame buffer\n":"", - vid_cap.type&VID_TYPE_CHROMAKEY?" Overlay by chromakey\n":"", - vid_cap.type&VID_TYPE_CLIPPING?" Can clip\n":"", - vid_cap.type&VID_TYPE_FRAMERAM?" Uses the frame buffer memory\n":"", - vid_cap.type&VID_TYPE_SCALES?" Scalable\n":"", - vid_cap.type&VID_TYPE_MONOCHROME?" Monochrome only\n":"", - vid_cap.type&VID_TYPE_SUBCAPTURE?" Can capture subareas of the image\n":"", - vid_cap.type&VID_TYPE_MPEG_DECODER?" Can decode MPEG streams\n":"", - vid_cap.type&VID_TYPE_MPEG_ENCODER?" Can encode MPEG streams\n":"", - vid_cap.type&VID_TYPE_MJPEG_DECODER?" Can decode MJPEG streams\n":"", - vid_cap.type&VID_TYPE_MJPEG_ENCODER?" Can encode MJPEG streams\n":"" - ); + vid_cap.type&VID_TYPE_CAPTURE?" Can capture\n":"", + vid_cap.type&VID_TYPE_TUNER?" Can tune\n":"", + vid_cap.type&VID_TYPE_TELETEXT?" Does teletext\n":"", + vid_cap.type&VID_TYPE_OVERLAY?" Overlay onto frame buffer\n":"", + vid_cap.type&VID_TYPE_CHROMAKEY?" Overlay by chromakey\n":"", + vid_cap.type&VID_TYPE_CLIPPING?" Can clip\n":"", + vid_cap.type&VID_TYPE_FRAMERAM?" Uses the frame buffer memory\n":"", + vid_cap.type&VID_TYPE_SCALES?" Scalable\n":"", + vid_cap.type&VID_TYPE_MONOCHROME?" Monochrome only\n":"", + vid_cap.type&VID_TYPE_SUBCAPTURE?" Can capture subareas of the image\n":"", + vid_cap.type&VID_TYPE_MPEG_DECODER?" Can decode MPEG streams\n":"", + vid_cap.type&VID_TYPE_MPEG_ENCODER?" Can encode MPEG streams\n":"", + vid_cap.type&VID_TYPE_MJPEG_DECODER?" Can decode MJPEG streams\n":"", + vid_cap.type&VID_TYPE_MJPEG_ENCODER?" 
Can encode MJPEG streams\n":"" + ); sprintf( output+strlen(output), " Video Channels: %d\n", vid_cap.channels ); sprintf( output+strlen(output), " Audio Channels: %d\n", vid_cap.audios ); sprintf( output+strlen(output), " Maximum Width: %d\n", vid_cap.maxwidth ); @@ -1664,25 +1660,25 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers { sprintf( output+strlen(output), "Picture Attributes\n" ); sprintf( output+strlen(output), " Palette: %d - %s\n", vid_pic.palette, - vid_pic.palette==VIDEO_PALETTE_GREY?"Linear greyscale":( - vid_pic.palette==VIDEO_PALETTE_HI240?"High 240 cube (BT848)":( - vid_pic.palette==VIDEO_PALETTE_RGB565?"565 16 bit RGB":( - vid_pic.palette==VIDEO_PALETTE_RGB24?"24bit RGB":( - vid_pic.palette==VIDEO_PALETTE_RGB32?"32bit RGB":( - vid_pic.palette==VIDEO_PALETTE_RGB555?"555 15bit RGB":( - vid_pic.palette==VIDEO_PALETTE_YUV422?"YUV422 capture":( - vid_pic.palette==VIDEO_PALETTE_YUYV?"YUYV":( - vid_pic.palette==VIDEO_PALETTE_UYVY?"UVYV":( - vid_pic.palette==VIDEO_PALETTE_YUV420?"YUV420":( - vid_pic.palette==VIDEO_PALETTE_YUV411?"YUV411 capture":( - vid_pic.palette==VIDEO_PALETTE_RAW?"RAW capture (BT848)":( - vid_pic.palette==VIDEO_PALETTE_YUYV?"YUYV":( - vid_pic.palette==VIDEO_PALETTE_YUV422?"YUV422":( - vid_pic.palette==VIDEO_PALETTE_YUV422P?"YUV 4:2:2 Planar":( - vid_pic.palette==VIDEO_PALETTE_YUV411P?"YUV 4:1:1 Planar":( - vid_pic.palette==VIDEO_PALETTE_YUV420P?"YUV 4:2:0 Planar":( - vid_pic.palette==VIDEO_PALETTE_YUV410P?"YUV 4:1:0 Planar":"Unknown" - )))))))))))))))))); + vid_pic.palette==VIDEO_PALETTE_GREY?"Linear greyscale":( + vid_pic.palette==VIDEO_PALETTE_HI240?"High 240 cube (BT848)":( + vid_pic.palette==VIDEO_PALETTE_RGB565?"565 16 bit RGB":( + vid_pic.palette==VIDEO_PALETTE_RGB24?"24bit RGB":( + vid_pic.palette==VIDEO_PALETTE_RGB32?"32bit RGB":( + vid_pic.palette==VIDEO_PALETTE_RGB555?"555 15bit RGB":( + vid_pic.palette==VIDEO_PALETTE_YUV422?"YUV422 capture":( + 
vid_pic.palette==VIDEO_PALETTE_YUYV?"YUYV":( + vid_pic.palette==VIDEO_PALETTE_UYVY?"UVYV":( + vid_pic.palette==VIDEO_PALETTE_YUV420?"YUV420":( + vid_pic.palette==VIDEO_PALETTE_YUV411?"YUV411 capture":( + vid_pic.palette==VIDEO_PALETTE_RAW?"RAW capture (BT848)":( + vid_pic.palette==VIDEO_PALETTE_YUYV?"YUYV":( + vid_pic.palette==VIDEO_PALETTE_YUV422?"YUV422":( + vid_pic.palette==VIDEO_PALETTE_YUV422P?"YUV 4:2:2 Planar":( + vid_pic.palette==VIDEO_PALETTE_YUV411P?"YUV 4:1:1 Planar":( + vid_pic.palette==VIDEO_PALETTE_YUV420P?"YUV 4:2:0 Planar":( + vid_pic.palette==VIDEO_PALETTE_YUV410P?"YUV 4:1:0 Planar":"Unknown" + )))))))))))))))))); sprintf( output+strlen(output), " Colour Depth: %d\n", vid_pic.depth ); sprintf( output+strlen(output), " Brightness: %d\n", vid_pic.brightness ); sprintf( output+strlen(output), " Hue: %d\n", vid_pic.hue ); @@ -1721,19 +1717,19 @@ bool LocalCamera::GetCurrentSettings( const char *device, char *output, int vers sprintf( output+strlen(output), " Name: %s\n", vid_src.name ); sprintf( output+strlen(output), " Channel: %d\n", vid_src.channel ); sprintf( output+strlen(output), " Flags: %d\n%s%s", vid_src.flags, - vid_src.flags&VIDEO_VC_TUNER?" Channel has a tuner\n":"", - vid_src.flags&VIDEO_VC_AUDIO?" Channel has audio\n":"" - ); + vid_src.flags&VIDEO_VC_TUNER?" Channel has a tuner\n":"", + vid_src.flags&VIDEO_VC_AUDIO?" 
Channel has audio\n":"" + ); sprintf( output+strlen(output), " Type: %d - %s\n", vid_src.type, - vid_src.type==VIDEO_TYPE_TV?"TV":( - vid_src.type==VIDEO_TYPE_CAMERA?"Camera":"Unknown" - )); + vid_src.type==VIDEO_TYPE_TV?"TV":( + vid_src.type==VIDEO_TYPE_CAMERA?"Camera":"Unknown" + )); sprintf( output+strlen(output), " Format: %d - %s\n", vid_src.norm, - vid_src.norm==VIDEO_MODE_PAL?"PAL":( - vid_src.norm==VIDEO_MODE_NTSC?"NTSC":( - vid_src.norm==VIDEO_MODE_SECAM?"SECAM":( - vid_src.norm==VIDEO_MODE_AUTO?"AUTO":"Unknown" - )))); + vid_src.norm==VIDEO_MODE_PAL?"PAL":( + vid_src.norm==VIDEO_MODE_NTSC?"NTSC":( + vid_src.norm==VIDEO_MODE_SECAM?"SECAM":( + vid_src.norm==VIDEO_MODE_AUTO?"AUTO":"Unknown" + )))); } else { @@ -1772,7 +1768,7 @@ int LocalCamera::Brightness( int p_brightness ) Error( "Unable to query brightness: %s", strerror(errno) ) else Warning( "Brightness control is not supported" ) - //Info( "Brightness 1 %d", vid_control.value ); + //Info( "Brightness 1 %d", vid_control.value ); } else if ( p_brightness >= 0 ) { @@ -2058,7 +2054,7 @@ int LocalCamera::Capture( Image &image ) static uint8_t* directbuffer = NULL; static int capture_frame = -1; int buffer_bytesused = 0; - + int captures_per_frame = 1; if ( channel_count > 1 ) captures_per_frame = v4l_captures_per_frame; @@ -2066,8 +2062,8 @@ int LocalCamera::Capture( Image &image ) captures_per_frame = 1; Warning( "Invalid Captures Per Frame setting: %d", captures_per_frame ); } - - + + // Do the capture, unless we are the second or subsequent camera on a channel, in which case just reuse the buffer if ( channel_prime ) { @@ -2091,7 +2087,7 @@ int LocalCamera::Capture( Image &image ) Warning( "Capture failure, possible signal loss?: %s", strerror(errno) ) else Error( "Unable to capture frame %d: %s", vid_buf.index, strerror(errno) ) - return( -1 ); + return( -1 ); } v4l2_data.bufptr = &vid_buf; @@ -2112,9 +2108,9 @@ int LocalCamera::Capture( Image &image ) buffer_bytesused = v4l2_data.bufptr->bytesused; 
if((v4l2_data.fmt.fmt.pix.width * v4l2_data.fmt.fmt.pix.height) != (width * height)) { - Fatal("Captured image dimensions differ: V4L2: %dx%d monitor: %dx%d",v4l2_data.fmt.fmt.pix.width,v4l2_data.fmt.fmt.pix.height,width,height); + Fatal("Captured image dimensions differ: V4L2: %dx%d monitor: %dx%d",v4l2_data.fmt.fmt.pix.width,v4l2_data.fmt.fmt.pix.height,width,height); } - + } #endif // ZM_HAS_V4L2 #if ZM_HAS_V4L1 @@ -2146,12 +2142,12 @@ int LocalCamera::Capture( Image &image ) buffer = v4l1_data.bufptr+v4l1_data.frames.offsets[capture_frame]; } #endif // ZM_HAS_V4L1 - } /* prime capture */ - + } /* prime capture */ + if(conversion_type != 0) { - + Debug( 3, "Performing format conversion" ); - + /* Request a writeable buffer of the target image */ directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); if(directbuffer == NULL) { @@ -2165,36 +2161,36 @@ int LocalCamera::Capture( Image &image ) /* Use swscale to convert the image directly into the shared memory */ #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0) av_image_fill_arrays(tmpPicture->data, - tmpPicture->linesize, directbuffer, - imagePixFormat, width, height, 1); + tmpPicture->linesize, directbuffer, + imagePixFormat, width, height, 1); #else avpicture_fill( (AVPicture *)tmpPicture, directbuffer, - imagePixFormat, width, height ); + imagePixFormat, width, height ); #endif sws_scale( imgConversionContext, capturePictures[capture_frame]->data, capturePictures[capture_frame]->linesize, 0, height, tmpPicture->data, tmpPicture->linesize ); } #endif if(conversion_type == 2) { - + Debug( 9, "Calling the conversion function" ); /* Call the image conversion function and convert directly into the shared memory */ (*conversion_fptr)(buffer, directbuffer, pixels); } else if(conversion_type == 3) { - + Debug( 9, "Decoding the JPEG image" ); /* JPEG decoding */ image.DecodeJpeg(buffer, buffer_bytesused, colours, subpixelorder); } - + } else { Debug( 3, "No format conversion performed. 
Assigning the image" ); - + /* No conversion was performed, the image is in the V4L buffers and needs to be copied into the shared memory */ image.Assign( width, height, colours, subpixelorder, buffer, imagesize); - + } - + return( 0 ); } diff --git a/src/zm_local_camera.h b/src/zm_local_camera.h index 4518d7945..dae5f830e 100644 --- a/src/zm_local_camera.h +++ b/src/zm_local_camera.h @@ -23,6 +23,7 @@ #include "zm.h" #include "zm_camera.h" #include "zm_image.h" +#include "zm_packetqueue.h" #if ZM_HAS_V4L @@ -52,31 +53,31 @@ class LocalCamera : public Camera { protected: #if ZM_HAS_V4L2 - struct V4L2MappedBuffer - { - void *start; - size_t length; - }; + struct V4L2MappedBuffer + { + void *start; + size_t length; + }; - struct V4L2Data - { - v4l2_cropcap cropcap; - v4l2_crop crop; - v4l2_format fmt; - v4l2_requestbuffers reqbufs; - V4L2MappedBuffer *buffers; - v4l2_buffer *bufptr; - }; + struct V4L2Data + { + v4l2_cropcap cropcap; + v4l2_crop crop; + v4l2_format fmt; + v4l2_requestbuffers reqbufs; + V4L2MappedBuffer *buffers; + v4l2_buffer *bufptr; + }; #endif // ZM_HAS_V4L2 #if ZM_HAS_V4L1 - struct V4L1Data - { - int active_frame; - video_mbuf frames; - video_mmap *buffers; - unsigned char *bufptr; - }; + struct V4L1Data + { + int active_frame; + video_mbuf frames; + video_mmap *buffers; + unsigned char *bufptr; + }; #endif // ZM_HAS_V4L1 protected: @@ -104,21 +105,21 @@ protected: unsigned int v4l_captures_per_frame; #if ZM_HAS_V4L2 - static V4L2Data v4l2_data; + static V4L2Data v4l2_data; #endif // ZM_HAS_V4L2 #if ZM_HAS_V4L1 - static V4L1Data v4l1_data; + static V4L1Data v4l1_data; #endif // ZM_HAS_V4L1 #if HAVE_LIBSWSCALE - static AVFrame **capturePictures; - _AVPIXELFORMAT imagePixFormat; - _AVPIXELFORMAT capturePixFormat; + static AVFrame **capturePictures; + _AVPIXELFORMAT imagePixFormat; + _AVPIXELFORMAT capturePixFormat; struct SwsContext *imgConversionContext; - AVFrame *tmpPicture; + AVFrame *tmpPicture; #endif // HAVE_LIBSWSCALE - static LocalCamera 
*last_camera; + static LocalCamera *last_camera; public: LocalCamera( @@ -161,7 +162,7 @@ public: int PreCapture(); int Capture( Image &image ); int PostCapture(); - int CaptureAndRecord( Image &image, bool recording, char* event_directory ) {return(0);}; + int CaptureAndRecord( Image &image, timeval recording, char* event_directory ) {return(0);}; static bool GetCurrentSettings( const char *device, char *output, int version, bool verbose ); }; diff --git a/src/zm_monitor.cpp b/src/zm_monitor.cpp index 97a46b0ab..3b9579880 100644 --- a/src/zm_monitor.cpp +++ b/src/zm_monitor.cpp @@ -75,8 +75,7 @@ std::vector split(const std::string &s, char delim) { -Monitor::MonitorLink::MonitorLink( int p_id, const char *p_name ) : id( p_id ) -{ +Monitor::MonitorLink::MonitorLink( int p_id, const char *p_name ) : id( p_id ) { strncpy( name, p_name, sizeof(name) ); #if ZM_MEM_MAPPED @@ -95,15 +94,12 @@ Monitor::MonitorLink::MonitorLink( int p_id, const char *p_name ) : id( p_id ) connected = false; } -Monitor::MonitorLink::~MonitorLink() -{ +Monitor::MonitorLink::~MonitorLink() { disconnect(); } -bool Monitor::MonitorLink::connect() -{ - if ( !last_connect_time || (time( 0 ) - last_connect_time) > 60 ) - { +bool Monitor::MonitorLink::connect() { + if ( !last_connect_time || (time( 0 ) - last_connect_time) > 60 ) { last_connect_time = time( 0 ); mem_size = sizeof(SharedData) + sizeof(TriggerData); @@ -111,8 +107,7 @@ bool Monitor::MonitorLink::connect() Debug( 1, "link.mem.size=%d", mem_size ); #if ZM_MEM_MAPPED map_fd = open( mem_file, O_RDWR, (mode_t)0600 ); - if ( map_fd < 0 ) - { + if ( map_fd < 0 ) { Debug( 3, "Can't open linked memory map file %s: %s", mem_file, strerror(errno) ); disconnect(); return( false ); @@ -125,44 +120,37 @@ bool Monitor::MonitorLink::connect() } struct stat map_stat; - if ( fstat( map_fd, &map_stat ) < 0 ) - { + if ( fstat( map_fd, &map_stat ) < 0 ) { Error( "Can't stat linked memory map file %s: %s", mem_file, strerror(errno) ); disconnect(); 
return( false ); } - if ( map_stat.st_size == 0 ) - { + if ( map_stat.st_size == 0 ) { Error( "Linked memory map file %s is empty: %s", mem_file, strerror(errno) ); disconnect(); return( false ); - } - else if ( map_stat.st_size < mem_size ) - { + } else if ( map_stat.st_size < mem_size ) { Error( "Got unexpected memory map file size %ld, expected %d", map_stat.st_size, mem_size ); disconnect(); return( false ); } mem_ptr = (unsigned char *)mmap( NULL, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED, map_fd, 0 ); - if ( mem_ptr == MAP_FAILED ) - { + if ( mem_ptr == MAP_FAILED ) { Error( "Can't map file %s (%d bytes) to memory: %s", mem_file, mem_size, strerror(errno) ); disconnect(); return( false ); } #else // ZM_MEM_MAPPED shm_id = shmget( (config.shm_key&0xffff0000)|id, mem_size, 0700 ); - if ( shm_id < 0 ) - { + if ( shm_id < 0 ) { Debug( 3, "Can't shmget link memory: %s", strerror(errno) ); connected = false; return( false ); } mem_ptr = (unsigned char *)shmat( shm_id, 0, 0 ); - if ( mem_ptr < (void *)0 ) - { + if ( mem_ptr < (void *)0 ) { Debug( 3, "Can't shmat link memory: %s", strerror(errno) ); connected = false; return( false ); @@ -172,8 +160,7 @@ bool Monitor::MonitorLink::connect() shared_data = (SharedData *)mem_ptr; trigger_data = (TriggerData *)((char *)shared_data + sizeof(SharedData)); - if ( !shared_data->valid ) - { + if ( !shared_data->valid ) { Debug( 3, "Linked memory not initialised by capture daemon" ); disconnect(); return( false ); @@ -188,15 +175,12 @@ bool Monitor::MonitorLink::connect() return( false ); } -bool Monitor::MonitorLink::disconnect() -{ - if ( connected ) - { +bool Monitor::MonitorLink::disconnect() { + if ( connected ) { connected = false; #if ZM_MEM_MAPPED - if ( mem_ptr > (void *)0 ) - { + if ( mem_ptr > (void *)0 ) { msync( mem_ptr, mem_size, MS_ASYNC ); munmap( mem_ptr, mem_size ); } @@ -206,25 +190,21 @@ bool Monitor::MonitorLink::disconnect() map_fd = -1; #else // ZM_MEM_MAPPED struct shmid_ds shm_data; - if ( shmctl( 
shm_id, IPC_STAT, &shm_data ) < 0 ) - { + if ( shmctl( shm_id, IPC_STAT, &shm_data ) < 0 ) { Debug( 3, "Can't shmctl: %s", strerror(errno) ); return( false ); } shm_id = 0; - if ( shm_data.shm_nattch <= 1 ) - { - if ( shmctl( shm_id, IPC_RMID, 0 ) < 0 ) - { + if ( shm_data.shm_nattch <= 1 ) { + if ( shmctl( shm_id, IPC_RMID, 0 ) < 0 ) { Debug( 3, "Can't shmctl: %s", strerror(errno) ); return( false ); } } - if ( shmdt( mem_ptr ) < 0 ) - { + if ( shmdt( mem_ptr ) < 0 ) { Debug( 3, "Can't shmdt: %s", strerror(errno) ); return( false ); } @@ -236,32 +216,24 @@ bool Monitor::MonitorLink::disconnect() return( true ); } -bool Monitor::MonitorLink::isAlarmed() -{ - if ( !connected ) - { +bool Monitor::MonitorLink::isAlarmed() { + if ( !connected ) { return( false ); } return( shared_data->state == ALARM ); } -bool Monitor::MonitorLink::inAlarm() -{ - if ( !connected ) - { +bool Monitor::MonitorLink::inAlarm() { + if ( !connected ) { return( false ); } return( shared_data->state == ALARM || shared_data->state == ALERT ); } -bool Monitor::MonitorLink::hasAlarmed() -{ - if ( shared_data->state == ALARM ) - { +bool Monitor::MonitorLink::hasAlarmed() { + if ( shared_data->state == ALARM ) { return( true ); - } - else if( shared_data->last_event != (unsigned int)last_event ) - { + } else if ( shared_data->last_event != (unsigned int)last_event ) { last_event = shared_data->last_event; } return( false ); @@ -278,7 +250,7 @@ Monitor::Monitor( int p_orientation, unsigned int p_deinterlacing, int p_savejpegs, - int p_videowriter, + VideoWriter p_videowriter, std::string p_encoderparams, bool p_record_audio, const char *p_event_prefix, @@ -316,7 +288,7 @@ Monitor::Monitor( orientation( (Orientation)p_orientation ), deinterlacing( p_deinterlacing ), savejpegspref( p_savejpegs ), - videowriterpref( p_videowriter ), + videowriter( p_videowriter ), encoderparams( p_encoderparams ), record_audio( p_record_audio ), label_coord( p_label_coord ), @@ -359,23 +331,19 @@ Monitor::Monitor( // 
Change \n to actual line feeds char *token_ptr = label_format; const char *token_string = "\n"; - while( ( token_ptr = strstr( token_ptr, token_string ) ) ) - { - if ( *(token_ptr+1) ) - { + while( ( token_ptr = strstr( token_ptr, token_string ) ) ) { + if ( *(token_ptr+1) ) { *token_ptr = '\n'; token_ptr++; strcpy( token_ptr, token_ptr+1 ); - } - else - { + } else { *token_ptr = '\0'; break; } } - /* Parse encoder parameters */ - ParseEncoderParameters(encoderparams.c_str(), &encoderparamsvec); + /* Parse encoder parameters */ + ParseEncoderParameters(encoderparams.c_str(), &encoderparamsvec); fps = 0.0; event_count = 0; @@ -439,7 +407,7 @@ Monitor::Monitor( trigger_data->trigger_text[0] = 0; trigger_data->trigger_showtext[0] = 0; shared_data->valid = true; - video_store_data->recording = false; + video_store_data->recording = (struct timeval){0}; snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "nothing"); video_store_data->size = sizeof(VideoStoreData); //video_store_data->frameNumber = 0; @@ -452,10 +420,8 @@ Monitor::Monitor( shared_data->alarm_y = -1; } - if ( ( ! mem_ptr ) || ! shared_data->valid ) - { - if ( purpose != QUERY ) - { + if ( ( ! mem_ptr ) || ! 
shared_data->valid ) { + if ( purpose != QUERY ) { Error( "Shared data not initialised by capture daemon for monitor %s", name ); exit( -1 ); } @@ -478,8 +444,7 @@ Monitor::Monitor( Debug( 1, "Monitor %s LBF = '%s', LBX = %d, LBY = %d, LBS = %d", name, label_format, label_coord.X(), label_coord.Y(), label_size ); Debug( 1, "Monitor %s IBC = %d, WUC = %d, pEC = %d, PEC = %d, EAF = %d, FRI = %d, RBP = %d, ARBP = %d, FM = %d", name, image_buffer_count, warmup_count, pre_event_count, post_event_count, alarm_frame_count, fps_report_interval, ref_blend_perc, alarm_ref_blend_perc, track_motion ); - if ( purpose == ANALYSIS ) - { + if ( purpose == ANALYSIS ) { static char path[PATH_MAX]; strncpy( path, config.dir_events, sizeof(path) ); @@ -487,10 +452,8 @@ Monitor::Monitor( struct stat statbuf; errno = 0; stat( path, &statbuf ); - if ( errno == ENOENT || errno == ENOTDIR ) - { - if ( mkdir( path, 0755 ) ) - { + if ( errno == ENOENT || errno == ENOTDIR ) { + if ( mkdir( path, 0755 ) ) { Error( "Can't make %s: %s", path, strerror(errno)); } } @@ -499,10 +462,8 @@ Monitor::Monitor( errno = 0; stat( path, &statbuf ); - if ( errno == ENOENT || errno == ENOTDIR ) - { - if ( mkdir( path, 0755 ) ) - { + if ( errno == ENOENT || errno == ENOTDIR ) { + if ( mkdir( path, 0755 ) ) { Error( "Can't make %s: %s", path, strerror(errno)); } char temp_path[PATH_MAX]; @@ -516,8 +477,7 @@ Monitor::Monitor( } while( shared_data->last_write_index == (unsigned int)image_buffer_count - && shared_data->last_write_time == 0) - { + && shared_data->last_write_time == 0) { Warning( "Waiting for capture daemon" ); sleep( 1 ); } @@ -576,8 +536,7 @@ bool Monitor::connect() { exit( -1 ); } mem_ptr = (unsigned char *)shmat( shm_id, 0, 0 ); - if ( mem_ptr < (void *)0 ) - { + if ( mem_ptr < (void *)0 ) { Error( "Can't shmat: %s", strerror(errno)); exit( -1 ); } @@ -594,29 +553,25 @@ bool Monitor::connect() { shared_images = (uint8_t*)((unsigned long)shared_images + (64 - ((unsigned long)shared_images % 
64))); } image_buffer = new Snapshot[image_buffer_count]; - for ( int i = 0; i < image_buffer_count; i++ ) - { + for ( int i = 0; i < image_buffer_count; i++ ) { image_buffer[i].timestamp = &(shared_timestamps[i]); image_buffer[i].image = new Image( width, height, camera->Colours(), camera->SubpixelOrder(), &(shared_images[i*camera->ImageSize()]) ); image_buffer[i].image->HoldBuffer(true); /* Don't release the internal buffer or replace it with another */ } - if ( (deinterlacing & 0xff) == 4) - { + if ( (deinterlacing & 0xff) == 4) { /* Four field motion adaptive deinterlacing in use */ /* Allocate a buffer for the next image */ next_buffer.image = new Image( width, height, camera->Colours(), camera->SubpixelOrder()); next_buffer.timestamp = new struct timeval; } - if ( ( purpose == ANALYSIS ) && analysis_fps ) - { + if ( ( purpose == ANALYSIS ) && analysis_fps ) { // Size of pre event buffer must be greater than pre_event_count // if alarm_frame_count > 1, because in this case the buffer contains // alarmed images that must be discarded when event is created pre_event_buffer_count = pre_event_count + alarm_frame_count - 1; pre_event_buffer = new Snapshot[pre_event_buffer_count]; - for ( int i = 0; i < pre_event_buffer_count; i++ ) - { + for ( int i = 0; i < pre_event_buffer_count; i++ ) { pre_event_buffer[i].timestamp = new struct timeval; pre_event_buffer[i].image = new Image( width, height, camera->Colours(), camera->SubpixelOrder()); } @@ -625,8 +580,7 @@ bool Monitor::connect() { return true; } -Monitor::~Monitor() -{ +Monitor::~Monitor() { if ( timestamps ) { delete[] timestamps; timestamps = 0; @@ -635,30 +589,27 @@ Monitor::~Monitor() delete[] images; images = 0; } - if ( privacy_bitmask ) { - delete[] privacy_bitmask; - privacy_bitmask = NULL; - } + if ( privacy_bitmask ) { + delete[] privacy_bitmask; + privacy_bitmask = NULL; + } if ( mem_ptr ) { if ( event ) { Info( "%s: %03d - Closing event %d, shutting down", name, image_count, event->Id() ); 
closeEvent(); } - if ( (deinterlacing & 0xff) == 4) - { + if ( (deinterlacing & 0xff) == 4) { delete next_buffer.image; delete next_buffer.timestamp; } - for ( int i = 0; i < image_buffer_count; i++ ) - { + for ( int i = 0; i < image_buffer_count; i++ ) { delete image_buffer[i].image; } delete[] image_buffer; } // end if mem_ptr - for ( int i = 0; i < n_zones; i++ ) - { + for ( int i = 0; i < n_zones; i++ ) { delete zones[i]; } delete[] zones; @@ -666,24 +617,19 @@ Monitor::~Monitor() delete camera; if ( mem_ptr ) { - if ( purpose == ANALYSIS ) - { + if ( purpose == ANALYSIS ) { shared_data->state = state = IDLE; shared_data->last_read_index = image_buffer_count; shared_data->last_read_time = 0; - if ( analysis_fps ) - { - for ( int i = 0; i < pre_event_buffer_count; i++ ) - { + if ( analysis_fps ) { + for ( int i = 0; i < pre_event_buffer_count; i++ ) { delete pre_event_buffer[i].image; delete pre_event_buffer[i].timestamp; } delete[] pre_event_buffer; } - } - else if ( purpose == CAPTURE ) - { + } else if ( purpose == CAPTURE ) { shared_data->valid = false; memset( mem_ptr, 0, mem_size ); } @@ -696,12 +642,12 @@ Monitor::~Monitor() close( map_fd ); if ( purpose == CAPTURE ) { - char mmap_path[PATH_MAX] = ""; - snprintf( mmap_path, sizeof(mmap_path), "%s/zm.mmap.%d", config.path_map, id ); + char mmap_path[PATH_MAX] = ""; + snprintf( mmap_path, sizeof(mmap_path), "%s/zm.mmap.%d", config.path_map, id ); - if ( unlink( mmap_path ) < 0 ) { - Warning( "Can't unlink '%s': %s", mmap_path, strerror(errno) ); - } + if ( unlink( mmap_path ) < 0 ) { + Warning( "Can't unlink '%s': %s", mmap_path, strerror(errno) ); + } } #else // ZM_MEM_MAPPED struct shmid_ds shm_data; @@ -719,8 +665,7 @@ Monitor::~Monitor() } // end if mem_ptr } -void Monitor::AddZones( int p_n_zones, Zone *p_zones[] ) -{ +void Monitor::AddZones( int p_n_zones, Zone *p_zones[] ) { for ( int i = 0; i < n_zones; i++ ) delete zones[i]; delete[] zones; @@ -728,19 +673,15 @@ void Monitor::AddZones( int 
p_n_zones, Zone *p_zones[] ) zones = p_zones; } -void Monitor::AddPrivacyBitmask( Zone *p_zones[] ) -{ +void Monitor::AddPrivacyBitmask( Zone *p_zones[] ) { if ( privacy_bitmask ) delete[] privacy_bitmask; privacy_bitmask = NULL; Image *privacy_image = NULL; - for ( int i = 0; i < n_zones; i++ ) - { - if ( p_zones[i]->IsPrivacy() ) - { - if ( !privacy_image ) - { + for ( int i = 0; i < n_zones; i++ ) { + if ( p_zones[i]->IsPrivacy() ) { + if ( !privacy_image ) { privacy_image = new Image( width, height, 1, ZM_SUBPIX_ORDER_NONE); privacy_image->Clear(); } @@ -752,20 +693,16 @@ void Monitor::AddPrivacyBitmask( Zone *p_zones[] ) privacy_bitmask = privacy_image->Buffer(); } -Monitor::State Monitor::GetState() const -{ +Monitor::State Monitor::GetState() const { return( (State)shared_data->state ); } -int Monitor::GetImage( int index, int scale ) -{ - if ( index < 0 || index > image_buffer_count ) - { +int Monitor::GetImage( int index, int scale ) { + if ( index < 0 || index > image_buffer_count ) { index = shared_data->last_write_index; } - if ( index != image_buffer_count ) - { + if ( index != image_buffer_count ) { Image *image; // If we are going to be modifying the snapshot before writing, then we need to copy it if ( ( scale != ZM_SCALE_BASE ) || ( !config.timestamp_on_capture ) ) { @@ -792,75 +729,59 @@ int Monitor::GetImage( int index, int scale ) static char filename[PATH_MAX]; snprintf( filename, sizeof(filename), "Monitor%d.jpg", id ); image->WriteJpeg( filename ); - } - else - { + } else { Error( "Unable to generate image, no images in buffer" ); } return( 0 ); } -struct timeval Monitor::GetTimestamp( int index ) const -{ - if ( index < 0 || index > image_buffer_count ) - { +struct timeval Monitor::GetTimestamp( int index ) const { + if ( index < 0 || index > image_buffer_count ) { index = shared_data->last_write_index; } - if ( index != image_buffer_count ) - { + if ( index != image_buffer_count ) { Snapshot *snap = &image_buffer[index]; return( 
*(snap->timestamp) ); - } - else - { + } else { static struct timeval null_tv = { 0, 0 }; return( null_tv ); } } -unsigned int Monitor::GetLastReadIndex() const -{ +unsigned int Monitor::GetLastReadIndex() const { return( shared_data->last_read_index!=(unsigned int)image_buffer_count?shared_data->last_read_index:-1 ); } -unsigned int Monitor::GetLastWriteIndex() const -{ +unsigned int Monitor::GetLastWriteIndex() const { return( shared_data->last_write_index!=(unsigned int)image_buffer_count?shared_data->last_write_index:-1 ); } -unsigned int Monitor::GetLastEvent() const -{ +unsigned int Monitor::GetLastEvent() const { return( shared_data->last_event ); } -double Monitor::GetFPS() const -{ +double Monitor::GetFPS() const { int index1 = shared_data->last_write_index; - if ( index1 == image_buffer_count ) - { + if ( index1 == image_buffer_count ) { return( 0.0 ); } Snapshot *snap1 = &image_buffer[index1]; - if ( !snap1->timestamp || !snap1->timestamp->tv_sec ) - { + if ( !snap1->timestamp || !snap1->timestamp->tv_sec ) { return( 0.0 ); } struct timeval time1 = *snap1->timestamp; int image_count = image_buffer_count; int index2 = (index1+1)%image_buffer_count; - if ( index2 == image_buffer_count ) - { + if ( index2 == image_buffer_count ) { return( 0.0 ); } Snapshot *snap2 = &image_buffer[index2]; - while ( !snap2->timestamp || !snap2->timestamp->tv_sec ) - { - if ( index1 == index2 ) - { + while ( !snap2->timestamp || !snap2->timestamp->tv_sec ) { + if ( index1 == index2 ) { return( 0.0 ); } index2 = (index2+1)%image_buffer_count; @@ -873,143 +794,110 @@ double Monitor::GetFPS() const double curr_fps = image_count/time_diff; - if ( curr_fps < 0.0 ) - { + if ( curr_fps < 0.0 ) { //Error( "Negative FPS %f, time_diff = %lf (%d:%ld.%ld - %d:%ld.%ld), ibc: %d", curr_fps, time_diff, index2, time2.tv_sec, time2.tv_usec, index1, time1.tv_sec, time1.tv_usec, image_buffer_count ); return( 0.0 ); } return( curr_fps ); } -useconds_t Monitor::GetAnalysisRate() -{ +useconds_t 
Monitor::GetAnalysisRate() { double capturing_fps = GetFPS(); - if ( !analysis_fps ) - { + if ( !analysis_fps ) { return( 0 ); - } - else if ( analysis_fps > capturing_fps ) - { + } else if ( analysis_fps > capturing_fps ) { Warning( "Analysis fps (%.2f) is greater than capturing fps (%.2f)", analysis_fps, capturing_fps ); return( 0 ); - } - else - { + } else { return( ( 1000000 / analysis_fps ) - ( 1000000 / capturing_fps ) ); } } -void Monitor::UpdateAdaptiveSkip() -{ - if ( config.opt_adaptive_skip ) - { +void Monitor::UpdateAdaptiveSkip() { + if ( config.opt_adaptive_skip ) { double capturing_fps = GetFPS(); - if ( adaptive_skip && analysis_fps && ( analysis_fps < capturing_fps ) ) - { + if ( adaptive_skip && analysis_fps && ( analysis_fps < capturing_fps ) ) { Info( "Analysis fps (%.2f) is lower than capturing fps (%.2f), disabling adaptive skip feature", analysis_fps, capturing_fps ); adaptive_skip = false; - } - else if ( !adaptive_skip && ( !analysis_fps || ( analysis_fps >= capturing_fps ) ) ) - { + } else if ( !adaptive_skip && ( !analysis_fps || ( analysis_fps >= capturing_fps ) ) ) { Info( "Enabling adaptive skip feature" ); adaptive_skip = true; } - } - else - { + } else { adaptive_skip = false; } } -void Monitor::ForceAlarmOn( int force_score, const char *force_cause, const char *force_text ) -{ +void Monitor::ForceAlarmOn( int force_score, const char *force_cause, const char *force_text ) { trigger_data->trigger_state = TRIGGER_ON; trigger_data->trigger_score = force_score; strncpy( trigger_data->trigger_cause, force_cause, sizeof(trigger_data->trigger_cause) ); strncpy( trigger_data->trigger_text, force_text, sizeof(trigger_data->trigger_text) ); } -void Monitor::ForceAlarmOff() -{ +void Monitor::ForceAlarmOff() { trigger_data->trigger_state = TRIGGER_OFF; } -void Monitor::CancelForced() -{ +void Monitor::CancelForced() { trigger_data->trigger_state = TRIGGER_CANCEL; } -void Monitor::actionReload() -{ +void Monitor::actionReload() { 
shared_data->action |= RELOAD; } -void Monitor::actionEnable() -{ +void Monitor::actionEnable() { shared_data->action |= RELOAD; static char sql[ZM_SQL_SML_BUFSIZ]; snprintf( sql, sizeof(sql), "update Monitors set Enabled = 1 where Id = '%d'", id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } } -void Monitor::actionDisable() -{ +void Monitor::actionDisable() { shared_data->action |= RELOAD; static char sql[ZM_SQL_SML_BUFSIZ]; snprintf( sql, sizeof(sql), "update Monitors set Enabled = 0 where Id = '%d'", id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } } -void Monitor::actionSuspend() -{ +void Monitor::actionSuspend() { shared_data->action |= SUSPEND; } -void Monitor::actionResume() -{ +void Monitor::actionResume() { shared_data->action |= RESUME; } -int Monitor::actionBrightness( int p_brightness ) -{ - if ( purpose != CAPTURE ) - { - if ( p_brightness >= 0 ) - { +int Monitor::actionBrightness( int p_brightness ) { + if ( purpose != CAPTURE ) { + if ( p_brightness >= 0 ) { shared_data->brightness = p_brightness; shared_data->action |= SET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & SET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & SET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to set brightness" ); return( -1 ); } } - } - else - { + } else { shared_data->action |= GET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & GET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & GET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to get brightness" ); return( -1 ); } @@ -1020,36 +908,27 @@ int Monitor::actionBrightness( int p_brightness 
) return( camera->Brightness( p_brightness ) ); } -int Monitor::actionContrast( int p_contrast ) -{ - if ( purpose != CAPTURE ) - { - if ( p_contrast >= 0 ) - { +int Monitor::actionContrast( int p_contrast ) { + if ( purpose != CAPTURE ) { + if ( p_contrast >= 0 ) { shared_data->contrast = p_contrast; shared_data->action |= SET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & SET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & SET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to set contrast" ); return( -1 ); } } - } - else - { + } else { shared_data->action |= GET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & GET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & GET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to get contrast" ); return( -1 ); } @@ -1060,36 +939,27 @@ int Monitor::actionContrast( int p_contrast ) return( camera->Contrast( p_contrast ) ); } -int Monitor::actionHue( int p_hue ) -{ - if ( purpose != CAPTURE ) - { - if ( p_hue >= 0 ) - { +int Monitor::actionHue( int p_hue ) { + if ( purpose != CAPTURE ) { + if ( p_hue >= 0 ) { shared_data->hue = p_hue; shared_data->action |= SET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & SET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & SET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to set hue" ); return( -1 ); } } - } - else - { + } else { shared_data->action |= GET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & GET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & GET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to get hue" ); return( -1 ); } @@ -1100,36 +970,27 @@ int Monitor::actionHue( int p_hue ) return( camera->Hue( p_hue ) ); } 
-int Monitor::actionColour( int p_colour ) -{ - if ( purpose != CAPTURE ) - { - if ( p_colour >= 0 ) - { +int Monitor::actionColour( int p_colour ) { + if ( purpose != CAPTURE ) { + if ( p_colour >= 0 ) { shared_data->colour = p_colour; shared_data->action |= SET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & SET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & SET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to set colour" ); return( -1 ); } } - } - else - { + } else { shared_data->action |= GET_SETTINGS; int wait_loops = 10; - while ( shared_data->action & GET_SETTINGS ) - { - if ( wait_loops-- ) + while ( shared_data->action & GET_SETTINGS ) { + if ( wait_loops-- ) { usleep( 100000 ); - else - { + } else { Warning( "Timed out waiting to get colour" ); return( -1 ); } @@ -1140,16 +1001,13 @@ int Monitor::actionColour( int p_colour ) return( camera->Colour( p_colour ) ); } -void Monitor::DumpZoneImage( const char *zone_string ) -{ +void Monitor::DumpZoneImage( const char *zone_string ) { int exclude_id = 0; int extra_colour = 0; Polygon extra_zone; - if ( zone_string ) - { - if ( !Zone::ParseZoneString( zone_string, exclude_id, extra_colour, extra_zone ) ) - { + if ( zone_string ) { + if ( !Zone::ParseZoneString( zone_string, exclude_id, extra_colour, extra_zone ) ) { Error( "Failed to parse zone string, ignoring" ); } } @@ -1163,36 +1021,23 @@ void Monitor::DumpZoneImage( const char *zone_string ) zone_image.Colourise(ZM_COLOUR_RGB24, ZM_SUBPIX_ORDER_RGB ); } - for( int i = 0; i < n_zones; i++ ) - { + for( int i = 0; i < n_zones; i++ ) { if ( exclude_id && (!extra_colour || extra_zone.getNumCoords()) && zones[i]->Id() == exclude_id ) continue; Rgb colour; - if ( exclude_id && !extra_zone.getNumCoords() && zones[i]->Id() == exclude_id ) - { + if ( exclude_id && !extra_zone.getNumCoords() && zones[i]->Id() == exclude_id ) { colour = extra_colour; - } - else - { - if ( 
zones[i]->IsActive() ) - { + } else { + if ( zones[i]->IsActive() ) { colour = RGB_RED; - } - else if ( zones[i]->IsInclusive() ) - { + } else if ( zones[i]->IsInclusive() ) { colour = RGB_ORANGE; - } - else if ( zones[i]->IsExclusive() ) - { + } else if ( zones[i]->IsExclusive() ) { colour = RGB_PURPLE; - } - else if ( zones[i]->IsPreclusive() ) - { + } else if ( zones[i]->IsPreclusive() ) { colour = RGB_BLUE; - } - else - { + } else { colour = RGB_WHITE; } } @@ -1200,8 +1045,7 @@ void Monitor::DumpZoneImage( const char *zone_string ) zone_image.Outline( colour, zones[i]->GetPolygon() ); } - if ( extra_zone.getNumCoords() ) - { + if ( extra_zone.getNumCoords() ) { zone_image.Fill( extra_colour, 2, extra_zone ); zone_image.Outline( extra_colour, extra_zone ); } @@ -1211,10 +1055,8 @@ void Monitor::DumpZoneImage( const char *zone_string ) zone_image.WriteJpeg( filename ); } -void Monitor::DumpImage( Image *dump_image ) const -{ - if ( image_count && !(image_count%10) ) - { +void Monitor::DumpImage( Image *dump_image ) const { + if ( image_count && !(image_count%10) ) { static char filename[PATH_MAX]; static char new_filename[PATH_MAX]; snprintf( filename, sizeof(filename), "Monitor%d.jpg", id ); @@ -1224,8 +1066,7 @@ void Monitor::DumpImage( Image *dump_image ) const } } -bool Monitor::CheckSignal( const Image *image ) -{ +bool Monitor::CheckSignal( const Image *image ) { static bool static_undef = true; /* RGB24 colors */ static uint8_t red_val; @@ -1235,10 +1076,8 @@ bool Monitor::CheckSignal( const Image *image ) static Rgb colour_val; /* RGB32 color */ static int usedsubpixorder; - if ( config.signal_check_points > 0 ) - { - if ( static_undef ) - { + if ( config.signal_check_points > 0 ) { + if ( static_undef ) { static_undef = false; usedsubpixorder = camera->SubpixelOrder(); colour_val = rgb_convert(signal_check_colour, ZM_SUBPIX_ORDER_BGR); /* HTML colour code is actually BGR in memory, we want RGB */ @@ -1255,10 +1094,8 @@ bool Monitor::CheckSignal( const 
Image *image ) int colours = image->Colours(); int index = 0; - for ( int i = 0; i < config.signal_check_points; i++ ) - { - while( true ) - { + for ( int i = 0; i < config.signal_check_points; i++ ) { + while( true ) { index = (int)(((long long)rand()*(long long)(pixels-1))/RAND_MAX); if ( !config.timestamp_on_capture || !label_format[0] ) break; @@ -1267,67 +1104,62 @@ bool Monitor::CheckSignal( const Image *image ) break; } - if(colours == ZM_COLOUR_GRAY8) { - if ( *(buffer+index) != grayscale_val ) - return true; - - } else if(colours == ZM_COLOUR_RGB24) { - const uint8_t *ptr = buffer+(index*colours); - - if ( usedsubpixorder == ZM_SUBPIX_ORDER_BGR) { - if ( (RED_PTR_BGRA(ptr) != red_val) || (GREEN_PTR_BGRA(ptr) != green_val) || (BLUE_PTR_BGRA(ptr) != blue_val) ) - return true; - } else { - /* Assume RGB */ - if ( (RED_PTR_RGBA(ptr) != red_val) || (GREEN_PTR_RGBA(ptr) != green_val) || (BLUE_PTR_RGBA(ptr) != blue_val) ) + if(colours == ZM_COLOUR_GRAY8) { + if ( *(buffer+index) != grayscale_val ) return true; + + } else if(colours == ZM_COLOUR_RGB24) { + const uint8_t *ptr = buffer+(index*colours); + + if ( usedsubpixorder == ZM_SUBPIX_ORDER_BGR) { + if ( (RED_PTR_BGRA(ptr) != red_val) || (GREEN_PTR_BGRA(ptr) != green_val) || (BLUE_PTR_BGRA(ptr) != blue_val) ) + return true; + } else { + /* Assume RGB */ + if ( (RED_PTR_RGBA(ptr) != red_val) || (GREEN_PTR_RGBA(ptr) != green_val) || (BLUE_PTR_RGBA(ptr) != blue_val) ) + return true; + } + + } else if(colours == ZM_COLOUR_RGB32) { + if ( usedsubpixorder == ZM_SUBPIX_ORDER_ARGB || usedsubpixorder == ZM_SUBPIX_ORDER_ABGR) { + if ( ARGB_ABGR_ZEROALPHA(*(((const Rgb*)buffer)+index)) != ARGB_ABGR_ZEROALPHA(colour_val) ) + return true; + } else { + /* Assume RGBA or BGRA */ + if ( RGBA_BGRA_ZEROALPHA(*(((const Rgb*)buffer)+index)) != RGBA_BGRA_ZEROALPHA(colour_val) ) + return true; + } } - - } else if(colours == ZM_COLOUR_RGB32) { - if ( usedsubpixorder == ZM_SUBPIX_ORDER_ARGB || usedsubpixorder == ZM_SUBPIX_ORDER_ABGR) 
{ - if ( ARGB_ABGR_ZEROALPHA(*(((const Rgb*)buffer)+index)) != ARGB_ABGR_ZEROALPHA(colour_val) ) - return true; - } else { - /* Assume RGBA or BGRA */ - if ( RGBA_BGRA_ZEROALPHA(*(((const Rgb*)buffer)+index)) != RGBA_BGRA_ZEROALPHA(colour_val) ) - return true; - } - } - + } return( false ); } return( true ); } -bool Monitor::Analyse() -{ - if ( shared_data->last_read_index == shared_data->last_write_index ) - { - // I wonder how often this happens. Maybe if this happens we should sleep or something? +bool Monitor::Analyse() { + if ( shared_data->last_read_index == shared_data->last_write_index ) { + // I wonder how often this happens. Maybe if this happens we should sleep or something? return( false ); } struct timeval now; gettimeofday( &now, NULL ); - if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) - { + if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) { fps = double(fps_report_interval)/(now.tv_sec-last_fps_time); Info( "%s: %d - Analysing at %.2f fps", name, image_count, fps ); last_fps_time = now.tv_sec; } int index; - if ( adaptive_skip ) - { + if ( adaptive_skip ) { int read_margin = shared_data->last_read_index - shared_data->last_write_index; if ( read_margin < 0 ) read_margin += image_buffer_count; int step = 1; // Isn't read_margin always > 0 here? - if ( read_margin > 0 ) - { + if ( read_margin > 0 ) { // TODO explain this so... 90% of image buffer / 50% of read margin? 
step = (9*image_buffer_count)/(5*read_margin); } @@ -1336,21 +1168,15 @@ bool Monitor::Analyse() if ( pending_frames < 0 ) pending_frames += image_buffer_count; Debug( 4, "RI:%d, WI: %d, PF = %d, RM = %d, Step = %d", shared_data->last_read_index, shared_data->last_write_index, pending_frames, read_margin, step ); - if ( step <= pending_frames ) - { + if ( step <= pending_frames ) { index = (shared_data->last_read_index+step)%image_buffer_count; - } - else - { - if ( pending_frames ) - { + } else { + if ( pending_frames ) { Warning( "Approaching buffer overrun, consider slowing capture, simplifying analysis or increasing ring buffer size" ); } index = shared_data->last_write_index%image_buffer_count; } - } - else - { + } else { index = shared_data->last_write_index%image_buffer_count; } @@ -1358,32 +1184,25 @@ bool Monitor::Analyse() struct timeval *timestamp = snap->timestamp; Image *snap_image = snap->image; - if ( shared_data->action ) - { - if ( shared_data->action & RELOAD ) - { + if ( shared_data->action ) { + if ( shared_data->action & RELOAD ) { Info( "Received reload indication at count %d", image_count ); shared_data->action &= ~RELOAD; Reload(); } - if ( shared_data->action & SUSPEND ) - { - if ( Active() ) - { + if ( shared_data->action & SUSPEND ) { + if ( Active() ) { Info( "Received suspend indication at count %d", image_count ); shared_data->active = false; //closeEvent(); } - if ( config.max_suspend_time ) - { + if ( config.max_suspend_time ) { auto_resume_time = now.tv_sec + config.max_suspend_time; } shared_data->action &= ~SUSPEND; } - if ( shared_data->action & RESUME ) - { - if ( Enabled() && !Active() ) - { + if ( shared_data->action & RESUME ) { + if ( Enabled() && !Active() ) { Info( "Received resume indication at count %d", image_count ); shared_data->active = true; ref_image = *snap_image; @@ -1393,8 +1212,7 @@ bool Monitor::Analyse() shared_data->action &= ~RESUME; } } - if ( auto_resume_time && (now.tv_sec >= auto_resume_time) ) - { + if 
( auto_resume_time && (now.tv_sec >= auto_resume_time) ) { Info( "Auto resuming at count %d", image_count ); shared_data->active = true; ref_image = *snap_image; @@ -1406,8 +1224,7 @@ bool Monitor::Analyse() static int last_section_mod = 0; static bool last_signal; - if ( static_undef ) - { + if ( static_undef ) { // Sure would be nice to be able to assume that these were already initialized. It's just 1 compare/branch, but really not neccessary. static_undef = false; timestamps = new struct timeval *[pre_event_count]; @@ -1415,27 +1232,22 @@ bool Monitor::Analyse() last_signal = shared_data->signal; } - if ( Enabled() ) - { + if ( Enabled() ) { bool signal = shared_data->signal; bool signal_change = (signal != last_signal); //Set video recording flag for event start constructor and easy reference in code // TODO: Use enum instead of the # 2. Makes for easier reading bool videoRecording = ((GetOptVideoWriter() == 2) && camera->SupportsNativeVideo()); - if ( trigger_data->trigger_state != TRIGGER_OFF ) - { + if ( trigger_data->trigger_state != TRIGGER_OFF ) { unsigned int score = 0; - if ( Ready() ) - { + if ( Ready() ) { std::string cause; Event::StringSetMap noteSetMap; - if ( trigger_data->trigger_state == TRIGGER_ON ) - { + if ( trigger_data->trigger_state == TRIGGER_ON ) { score += trigger_data->trigger_score; - if ( !event ) - { + if ( !event ) { if ( cause.length() ) cause += ", "; cause += trigger_data->trigger_cause; @@ -1444,25 +1256,21 @@ bool Monitor::Analyse() noteSet.insert( trigger_data->trigger_text ); noteSetMap[trigger_data->trigger_cause] = noteSet; } - if ( signal_change ) - { + if ( signal_change ) { const char *signalText; - if ( !signal ) + if ( !signal ) { signalText = "Lost"; - else - { + } else { signalText = "Reacquired"; score += 100; } Warning( "%s: %s", SIGNAL_CAUSE, signalText ); - if ( event && !signal ) - { + if ( event && !signal ) { Info( "%s: %03d - Closing event %d, signal loss", name, image_count, event->Id() ); closeEvent(); 
last_section_mod = 0; } - if ( !event ) - { + if ( !event ) { if ( cause.length() ) cause += ", "; cause += SIGNAL_CAUSE; @@ -1473,28 +1281,22 @@ bool Monitor::Analyse() shared_data->state = state = IDLE; shared_data->active = signal; ref_image = *snap_image; - } - else if ( signal && Active() && (function == MODECT || function == MOCORD) ) - { + + } else if ( signal && Active() && (function == MODECT || function == MOCORD) ) { Event::StringSet zoneSet; int motion_score = last_motion_score; - if ( !(image_count % (motion_frame_skip+1) ) ) - { + if ( !(image_count % (motion_frame_skip+1) ) ) { // Get new score. motion_score = last_motion_score = DetectMotion( *snap_image, zoneSet ); } //int motion_score = DetectBlack( *snap_image, zoneSet ); - if ( motion_score ) - { - if ( !event ) - { + if ( motion_score ) { + if ( !event ) { score += motion_score; if ( cause.length() ) cause += ", "; cause += MOTION_CAUSE; - } - else - { + } else { score += motion_score; } noteSetMap[MOTION_CAUSE] = zoneSet; @@ -1502,20 +1304,14 @@ bool Monitor::Analyse() } shared_data->active = signal; } - if ( (!signal_change && signal) && n_linked_monitors > 0 ) - { + if ( (!signal_change && signal) && n_linked_monitors > 0 ) { bool first_link = true; Event::StringSet noteSet; - for ( int i = 0; i < n_linked_monitors; i++ ) - { - if ( linked_monitors[i]->isConnected() ) - { - if ( linked_monitors[i]->hasAlarmed() ) - { - if ( !event ) - { - if ( first_link ) - { + for ( int i = 0; i < n_linked_monitors; i++ ) { + if ( linked_monitors[i]->isConnected() ) { + if ( linked_monitors[i]->hasAlarmed() ) { + if ( !event ) { + if ( first_link ) { if ( cause.length() ) cause += ", "; cause += LINKED_CAUSE; @@ -1525,9 +1321,7 @@ bool Monitor::Analyse() noteSet.insert( linked_monitors[i]->Name() ); score += 50; } - } - else - { + } else { linked_monitors[i]->connect(); } } @@ -1535,142 +1329,117 @@ bool Monitor::Analyse() noteSetMap[LINKED_CAUSE] = noteSet; } //TODO: What happens is the event closes and 
sets recording to false then recording to true again so quickly that our capture daemon never picks it up. Maybe need a refresh flag? - if ( (!signal_change && signal) && (function == RECORD || function == MOCORD) ) - { - if ( event ) - { + if ( (!signal_change && signal) && (function == RECORD || function == MOCORD) ) { + if ( event ) { //TODO: We shouldn't have to do this every time. Not sure why it clears itself if this isn't here?? snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile()); int section_mod = timestamp->tv_sec%section_length; - if ( section_mod < last_section_mod ) - { - if ( state == IDLE || state == TAPE || event_close_mode == CLOSE_TIME ) - { - if ( state == TAPE ) - { + if ( section_mod < last_section_mod ) { + if ( state == IDLE || state == TAPE || event_close_mode == CLOSE_TIME ) { + if ( state == TAPE ) { shared_data->state = state = IDLE; Info( "%s: %03d - Closing event %d, section end", name, image_count, event->Id() ); - } - else + } else { Info( "%s: %03d - Closing event %d, section end forced ", name, image_count, event->Id() ); + } closeEvent(); last_section_mod = 0; } - } - else - { + } else { last_section_mod = section_mod; } } - if ( !event ) - { + if ( !event ) { // Create event event = new Event( this, *timestamp, "Continuous", noteSetMap, videoRecording ); shared_data->last_event = event->Id(); //set up video store data snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile()); - video_store_data->recording = true; + video_store_data->recording = event->StartTime(); Info( "%s: %03d - Opening new event %d, section start", name, image_count, event->Id() ); /* To prevent cancelling out an existing alert\prealarm\alarm state */ - if ( state == IDLE ) - { + if ( state == IDLE ) { shared_data->state = state = TAPE; } //if ( config.overlap_timed_events ) - if ( false ) - { + if ( false ) { int pre_index; int pre_event_images = 
pre_event_count; - if ( analysis_fps ) - { + if ( analysis_fps ) { // If analysis fps is set, // compute the index for pre event images in the dedicated buffer pre_index = image_count%pre_event_buffer_count; // Seek forward the next filled slot in to the buffer (oldest data) // from the current position - while ( pre_event_images && !pre_event_buffer[pre_index].timestamp->tv_sec ) - { + while ( pre_event_images && !pre_event_buffer[pre_index].timestamp->tv_sec ) { pre_index = (pre_index + 1)%pre_event_buffer_count; // Slot is empty, removing image from counter pre_event_images--; } - } - else - { + } else { // If analysis fps is not set (analysis performed at capturing framerate), // compute the index for pre event images in the capturing buffer pre_index = ((index + image_buffer_count) - pre_event_count)%image_buffer_count; // Seek forward the next filled slot in to the buffer (oldest data) // from the current position - while ( pre_event_images && !image_buffer[pre_index].timestamp->tv_sec ) - { + while ( pre_event_images && !image_buffer[pre_index].timestamp->tv_sec ) { pre_index = (pre_index + 1)%image_buffer_count; // Slot is empty, removing image from counter pre_event_images--; } } - if ( pre_event_images ) - { - if ( analysis_fps ) - for ( int i = 0; i < pre_event_images; i++ ) - { + if ( pre_event_images ) { + if ( analysis_fps ) { + for ( int i = 0; i < pre_event_images; i++ ) { timestamps[i] = pre_event_buffer[pre_index].timestamp; images[i] = pre_event_buffer[pre_index].image; pre_index = (pre_index + 1)%pre_event_buffer_count; } - else - for ( int i = 0; i < pre_event_images; i++ ) - { + } else { + for ( int i = 0; i < pre_event_images; i++ ) { timestamps[i] = image_buffer[pre_index].timestamp; images[i] = image_buffer[pre_index].image; pre_index = (pre_index + 1)%image_buffer_count; } + } event->AddFrames( pre_event_images, images, timestamps ); } - } - } + } // end if false or config.overlap_timed_events + } // end if ! 
event } - if ( score ) - { - if ( (state == IDLE || state == TAPE || state == PREALARM ) ) - { - if ( Event::PreAlarmCount() >= (alarm_frame_count-1) ) - { + if ( score ) { + if ( (state == IDLE || state == TAPE || state == PREALARM ) ) { + if ( Event::PreAlarmCount() >= (alarm_frame_count-1) ) { Info( "%s: %03d - Gone into alarm state", name, image_count ); shared_data->state = state = ALARM; - if ( signal_change || (function != MOCORD && state != ALERT) ) - { + if ( signal_change || (function != MOCORD && state != ALERT) ) { int pre_index; int pre_event_images = pre_event_count; - if ( analysis_fps ) - { + if ( analysis_fps ) { // If analysis fps is set, // compute the index for pre event images in the dedicated buffer pre_index = image_count%pre_event_buffer_count; // Seek forward the next filled slot in to the buffer (oldest data) // from the current position - while ( pre_event_images && !pre_event_buffer[pre_index].timestamp->tv_sec ) - { + while ( pre_event_images && !pre_event_buffer[pre_index].timestamp->tv_sec ) { pre_index = (pre_index + 1)%pre_event_buffer_count; // Slot is empty, removing image from counter pre_event_images--; } event = new Event( this, *(pre_event_buffer[pre_index].timestamp), cause, noteSetMap ); - } - else - { + } else { // If analysis fps is not set (analysis performed at capturing framerate), // compute the index for pre event images in the capturing buffer if ( alarm_frame_count > 1 ) @@ -1680,8 +1449,7 @@ bool Monitor::Analyse() // Seek forward the next filled slot in to the buffer (oldest data) // from the current position - while ( pre_event_images && !image_buffer[pre_index].timestamp->tv_sec ) - { + while ( pre_event_images && !image_buffer[pre_index].timestamp->tv_sec ) { pre_index = (pre_index + 1)%image_buffer_count; // Slot is empty, removing image from counter pre_event_images--; @@ -1692,133 +1460,98 @@ bool Monitor::Analyse() shared_data->last_event = event->Id(); //set up video store data 
snprintf(video_store_data->event_file, sizeof(video_store_data->event_file), "%s", event->getEventFile()); - video_store_data->recording = true; + video_store_data->recording = event->StartTime(); Info( "%s: %03d - Opening new event %d, alarm start", name, image_count, event->Id() ); - if ( pre_event_images ) - { - if ( analysis_fps ) - for ( int i = 0; i < pre_event_images; i++ ) - { + if ( pre_event_images ) { + if ( analysis_fps ) { + for ( int i = 0; i < pre_event_images; i++ ) { timestamps[i] = pre_event_buffer[pre_index].timestamp; images[i] = pre_event_buffer[pre_index].image; pre_index = (pre_index + 1)%pre_event_buffer_count; } - else - for ( int i = 0; i < pre_event_images; i++ ) - { + } else { + for ( int i = 0; i < pre_event_images; i++ ) { timestamps[i] = image_buffer[pre_index].timestamp; images[i] = image_buffer[pre_index].image; pre_index = (pre_index + 1)%image_buffer_count; } + } event->AddFrames( pre_event_images, images, timestamps ); } - if ( alarm_frame_count ) - { + if ( alarm_frame_count ) { event->SavePreAlarmFrames(); } } - } - else if ( state != PREALARM ) - { + } else if ( state != PREALARM ) { Info( "%s: %03d - Gone into prealarm state", name, image_count ); shared_data->state = state = PREALARM; } - } - else if ( state == ALERT ) - { + } else if ( state == ALERT ) { Info( "%s: %03d - Gone back into alarm state", name, image_count ); shared_data->state = state = ALARM; } last_alarm_count = image_count; - } - else - { - if ( state == ALARM ) - { + } else { + if ( state == ALARM ) { Info( "%s: %03d - Gone into alert state", name, image_count ); shared_data->state = state = ALERT; - } - else if ( state == ALERT ) - { - if ( image_count-last_alarm_count > post_event_count ) - { + } else if ( state == ALERT ) { + if ( image_count-last_alarm_count > post_event_count ) { Info( "%s: %03d - Left alarm state (%d) - %d(%d) images", name, image_count, event->Id(), event->Frames(), event->AlarmFrames() ); //if ( function != MOCORD || 
event_close_mode == CLOSE_ALARM || event->Cause() == SIGNAL_CAUSE ) - if ( function != MOCORD || event_close_mode == CLOSE_ALARM ) - { + if ( function != MOCORD || event_close_mode == CLOSE_ALARM ) { shared_data->state = state = IDLE; Info( "%s: %03d - Closing event %d, alarm end%s", name, image_count, event->Id(), (function==MOCORD)?", section truncated":"" ); closeEvent(); - } - else - { + } else { shared_data->state = state = TAPE; } } } - if ( state == PREALARM ) - { - if ( function != MOCORD ) - { + if ( state == PREALARM ) { + if ( function != MOCORD ) { shared_data->state = state = IDLE; - } - else - { + } else { shared_data->state = state = TAPE; } } if ( Event::PreAlarmCount() ) Event::EmptyPreAlarmFrames(); } - if ( state != IDLE ) - { - if ( state == PREALARM || state == ALARM ) - { - if ( config.create_analysis_images ) - { + if ( state != IDLE ) { + if ( state == PREALARM || state == ALARM ) { + if ( config.create_analysis_images ) { bool got_anal_image = false; alarm_image.Assign( *snap_image ); - for( int i = 0; i < n_zones; i++ ) - { - if ( zones[i]->Alarmed() ) - { - if ( zones[i]->AlarmImage() ) - { + for( int i = 0; i < n_zones; i++ ) { + if ( zones[i]->Alarmed() ) { + if ( zones[i]->AlarmImage() ) { alarm_image.Overlay( *(zones[i]->AlarmImage()) ); got_anal_image = true; } - if ( config.record_event_stats && state == ALARM ) - { + if ( config.record_event_stats && state == ALARM ) { zones[i]->RecordStats( event ); } } } - if ( got_anal_image ) - { + if ( got_anal_image ) { if ( state == PREALARM ) Event::AddPreAlarmFrame( snap_image, *timestamp, score, &alarm_image ); else event->AddFrame( snap_image, *timestamp, score, &alarm_image ); - } - else - { + } else { if ( state == PREALARM ) Event::AddPreAlarmFrame( snap_image, *timestamp, score ); else event->AddFrame( snap_image, *timestamp, score ); } - } - else - { - for( int i = 0; i < n_zones; i++ ) - { - if ( zones[i]->Alarmed() ) - { - if ( config.record_event_stats && state == ALARM ) - { + } 
else { + for( int i = 0; i < n_zones; i++ ) { + if ( zones[i]->Alarmed() ) { + if ( config.record_event_stats && state == ALARM ) { zones[i]->RecordStats( event ); } } @@ -1830,47 +1563,37 @@ bool Monitor::Analyse() } if ( event && noteSetMap.size() > 0 ) event->updateNotes( noteSetMap ); - } - else if ( state == ALERT ) - { + } else if ( state == ALERT ) { event->AddFrame( snap_image, *timestamp ); if ( noteSetMap.size() > 0 ) event->updateNotes( noteSetMap ); - } - else if ( state == TAPE ) - { + } else if ( state == TAPE ) { //Video Storage: activate only for supported cameras. Event::AddFrame knows whether or not we are recording video and saves frames accordingly - if((GetOptVideoWriter() == 2) && camera->SupportsNativeVideo()) - { - video_store_data->recording = true; - } - if ( !(image_count%(frame_skip+1)) ) - { - if ( config.bulk_frame_interval > 1 ) - { + //if((GetOptVideoWriter() == 2) && camera->SupportsNativeVideo()) { + // I don't think this is required, and causes problems, as the event file hasn't been setup yet. + //Warning("In state TAPE, + //video_store_data->recording = event->StartTime(); + //} + + if ( !(image_count%(frame_skip+1)) ) { + if ( config.bulk_frame_interval > 1 ) { event->AddFrame( snap_image, *timestamp, (event->Frames()AddFrame( snap_image, *timestamp ); } } } - } + } // end if ! 
IDLE } - } - else - { - if ( event ) - { + } else { + if ( event ) { Info( "%s: %03d - Closing event %d, trigger off", name, image_count, event->Id() ); closeEvent(); } shared_data->state = state = IDLE; last_section_mod = 0; } - if ( (!signal_change && signal) && (function == MODECT || function == MOCORD) ) - { + if ( (!signal_change && signal) && (function == MODECT || function == MOCORD) ) { if ( state == ALARM ) { ref_image.Blend( *snap_image, alarm_ref_blend_perc ); } else { @@ -1878,14 +1601,13 @@ bool Monitor::Analyse() } } last_signal = signal; - } + } // end if Enabled() shared_data->last_read_index = index%image_buffer_count; //shared_data->last_read_time = image_buffer[index].timestamp->tv_sec; shared_data->last_read_time = now.tv_sec; - if ( analysis_fps ) - { + if ( analysis_fps ) { // If analysis fps is set, add analysed image to dedicated pre event buffer int pre_index = image_count%pre_event_buffer_count; pre_event_buffer[pre_index].image->Assign(*snap->image); @@ -1897,8 +1619,7 @@ bool Monitor::Analyse() return( true ); } -void Monitor::Reload() -{ +void Monitor::Reload() { Debug( 1, "Reloading monitor %s", name ); if ( event ) @@ -1910,27 +1631,23 @@ void Monitor::Reload() // This seems to have fallen out of date. 
snprintf( sql, sizeof(sql), "select Function+0, Enabled, LinkedMonitors, EventPrefix, LabelFormat, LabelX, LabelY, LabelSize, WarmupCount, PreEventCount, PostEventCount, AlarmFrameCount, SectionLength, FrameSkip, MotionFrameSkip, AnalysisFPS, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS, FPSReportInterval, RefBlendPerc, AlarmRefBlendPerc, TrackMotion, SignalCheckColour from Monitors where Id = '%d'", id ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_RES *result = mysql_store_result( &dbconn ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } int n_monitors = mysql_num_rows( result ); - if ( n_monitors != 1 ) - { + if ( n_monitors != 1 ) { Error( "Bogus number of monitors, %d, returned. Can't reload", n_monitors ); return; } - if ( MYSQL_ROW dbrow = mysql_fetch_row( result ) ) - { + if ( MYSQL_ROW dbrow = mysql_fetch_row( result ) ) { int index = 0; function = (Function)atoi(dbrow[index++]); enabled = atoi(dbrow[index++]); @@ -1970,8 +1687,7 @@ void Monitor::Reload() ReloadLinkedMonitors( p_linked_monitors ); } - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -1980,11 +1696,9 @@ void Monitor::Reload() ReloadZones(); } -void Monitor::ReloadZones() -{ +void Monitor::ReloadZones() { Debug( 1, "Reloading zones for monitor %s", name ); - for( int i = 0; i < n_zones; i++ ) - { + for( int i = 0; i < n_zones; i++ ) { delete zones[i]; } delete[] zones; @@ -1993,13 +1707,10 @@ void Monitor::ReloadZones() //DumpZoneImage(); } -void Monitor::ReloadLinkedMonitors( const char *p_linked_monitors ) -{ +void Monitor::ReloadLinkedMonitors( const char *p_linked_monitors ) { Debug( 1, "Reloading linked monitors for monitor %s, '%s'", name, p_linked_monitors ); - 
if ( n_linked_monitors ) - { - for( int i = 0; i < n_linked_monitors; i++ ) - { + if ( n_linked_monitors ) { + for( int i = 0; i < n_linked_monitors; i++ ) { delete linked_monitors[i]; } delete[] linked_monitors; @@ -2007,44 +1718,35 @@ void Monitor::ReloadLinkedMonitors( const char *p_linked_monitors ) } n_linked_monitors = 0; - if ( p_linked_monitors ) - { + if ( p_linked_monitors ) { int n_link_ids = 0; unsigned int link_ids[256]; char link_id_str[8]; char *dest_ptr = link_id_str; const char *src_ptr = p_linked_monitors; - while( 1 ) - { + while( 1 ) { dest_ptr = link_id_str; - while( *src_ptr >= '0' && *src_ptr <= '9' ) - { - if ( (dest_ptr-link_id_str) < (unsigned int)(sizeof(link_id_str)-1) ) - { + while( *src_ptr >= '0' && *src_ptr <= '9' ) { + if ( (dest_ptr-link_id_str) < (unsigned int)(sizeof(link_id_str)-1) ) { *dest_ptr++ = *src_ptr++; - } - else - { + } else { break; } } // Add the link monitor - if ( dest_ptr != link_id_str ) - { + if ( dest_ptr != link_id_str ) { *dest_ptr = '\0'; unsigned int link_id = atoi(link_id_str); - if ( link_id > 0 && link_id != id) - { + if ( link_id > 0 && link_id != id) { Debug( 3, "Found linked monitor id %d", link_id ); int j; - for ( j = 0; j < n_link_ids; j++ ) - { + for ( j = 0; j < n_link_ids; j++ ) { if ( link_ids[j] == link_id ) break; } - if ( j == n_link_ids ) // Not already found - { + if ( j == n_link_ids ) { + // Not already found link_ids[n_link_ids++] = link_id; } } @@ -2056,38 +1758,31 @@ void Monitor::ReloadLinkedMonitors( const char *p_linked_monitors ) if ( !*src_ptr ) break; } - if ( n_link_ids > 0 ) - { + if ( n_link_ids > 0 ) { Debug( 1, "Linking to %d monitors", n_link_ids ); linked_monitors = new MonitorLink *[n_link_ids]; int count = 0; - for ( int i = 0; i < n_link_ids; i++ ) - { + for ( int i = 0; i < n_link_ids; i++ ) { Debug( 1, "Checking linked monitor %d", link_ids[i] ); static char sql[ZM_SQL_SML_BUFSIZ]; snprintf( sql, sizeof(sql), "select Id, Name from Monitors where Id = %d and Function 
!= 'None' and Function != 'Monitor' and Enabled = 1", link_ids[i] ); - if ( mysql_query( &dbconn, sql ) ) - { + if ( mysql_query( &dbconn, sql ) ) { Error( "Can't run query: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } MYSQL_RES *result = mysql_store_result( &dbconn ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } int n_monitors = mysql_num_rows( result ); - if ( n_monitors == 1 ) - { + if ( n_monitors == 1 ) { MYSQL_ROW dbrow = mysql_fetch_row( result ); Debug( 1, "Linking to monitor %d", link_ids[i] ); linked_monitors[count++] = new MonitorLink( link_ids[i], dbrow[1] ); - } - else - { + } else { Warning( "Can't link to monitor %d, invalid id, function or not enabled", link_ids[i] ); } mysql_free_result( result ); @@ -2098,8 +1793,7 @@ void Monitor::ReloadLinkedMonitors( const char *p_linked_monitors ) } #if ZM_HAS_V4L -int Monitor::LoadLocalMonitors( const char *device, Monitor **&monitors, Purpose purpose ) -{ +int Monitor::LoadLocalMonitors( const char *device, Monitor **&monitors, Purpose purpose ) { std::string sql = "select Id, Name, ServerId, Function+0, Enabled, LinkedMonitors, Device, Channel, Format, V4LMultiBuffer, V4LCapturesPerFrame, Method, Width, Height, Colours, Palette, Orientation+0, Deinterlacing, SaveJPEGs, VideoWriter, EncoderParameters, RecordAudio, Brightness, Contrast, Hue, Colour, EventPrefix, LabelFormat, LabelX, LabelY, LabelSize, ImageBufferCount, WarmupCount, PreEventCount, PostEventCount, StreamReplayBuffer, AlarmFrameCount, SectionLength, FrameSkip, MotionFrameSkip, AnalysisFPS, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS, FPSReportInterval, RefBlendPerc, AlarmRefBlendPerc, TrackMotion, SignalCheckColour, Exif from Monitors where Function != 'None' and Type = 'Local'"; if ( device[0] ) { sql += " AND Device='"; @@ -2120,8 +1814,7 @@ int Monitor::LoadLocalMonitors( const char *device, Monitor **&monitors, Purpose Debug( 1, "Got 
%d monitors", n_monitors ); delete[] monitors; monitors = new Monitor *[n_monitors]; - for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) - { + for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) { int col = 0; int id = atoi(dbrow[col]); col++; @@ -2162,7 +1855,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); unsigned int deinterlacing = atoi(dbrow[col]); col++; int savejpegs = atoi(dbrow[col]); col++; - int videowriter = atoi(dbrow[col]); col++; + VideoWriter videowriter = (VideoWriter)atoi(dbrow[col]); col++; std::string encoderparams = dbrow[col]; col++; bool record_audio = (*dbrow[col] != '0'); col++; @@ -2268,14 +1961,14 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); 0, 0 ); + camera->setMonitor( monitors[i] ); Zone **zones = 0; int n_zones = Zone::Load( monitors[i], zones ); monitors[i]->AddZones( n_zones, zones ); monitors[i]->AddPrivacyBitmask( zones ); Debug( 1, "Loaded monitor %d(%s), %d zones", id, name, n_zones ); } - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -2286,8 +1979,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); } #endif // ZM_HAS_V4L -int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const char *port, const char *path, Monitor **&monitors, Purpose purpose ) -{ +int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const char *port, const char *path, Monitor **&monitors, Purpose purpose ) { std::string sql = "select Id, Name, ServerId, Function+0, Enabled, LinkedMonitors, Protocol, Method, Host, Port, Path, Width, Height, Colours, Palette, Orientation+0, Deinterlacing, RTSPDescribe, SaveJPEGs, VideoWriter, EncoderParameters, RecordAudio, Brightness, Contrast, Hue, Colour, EventPrefix, LabelFormat, LabelX, LabelY, LabelSize, ImageBufferCount, WarmupCount, PreEventCount, 
PostEventCount, StreamReplayBuffer, AlarmFrameCount, SectionLength, FrameSkip, MotionFrameSkip, AnalysisFPS, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS, FPSReportInterval, RefBlendPerc, AlarmRefBlendPerc, TrackMotion, Exif from Monitors where Function != 'None' and Type = 'Remote'"; if ( staticConfig.SERVER_ID ) { sql += stringtf( " AND ServerId=%d", staticConfig.SERVER_ID ); @@ -2307,8 +1999,7 @@ int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c Debug( 1, "Got %d monitors", n_monitors ); delete[] monitors; monitors = new Monitor *[n_monitors]; - for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) - { + for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) { int col = 0; int id = atoi(dbrow[col]); col++; @@ -2332,7 +2023,7 @@ int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c unsigned int deinterlacing = atoi(dbrow[col]); col++; bool rtsp_describe = (*dbrow[col] != '0'); col++; int savejpegs = atoi(dbrow[col]); col++; - int videowriter = atoi(dbrow[col]); col++; + VideoWriter videowriter = (VideoWriter)atoi(dbrow[col]); col++; std::string encoderparams = dbrow[col]; col++; bool record_audio = (*dbrow[col] != '0'); col++; @@ -2368,8 +2059,7 @@ int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c bool embed_exif = (*dbrow[col] != '0'); col++; Camera *camera = 0; - if ( protocol == "http" ) - { + if ( protocol == "http" ) { camera = new RemoteCameraHttp( id, method, @@ -2388,8 +2078,7 @@ int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c ); } #if HAVE_LIBAVFORMAT - else if ( protocol == "rtsp" ) - { + else if ( protocol == "rtsp" ) { camera = new RemoteCameraRtsp( id, method, @@ -2409,8 +2098,7 @@ int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c ); } #endif // HAVE_LIBAVFORMAT - else - { + else { Fatal( "Unexpected remote camera protocol '%s'", protocol.c_str() ); } @@ -2456,14 +2144,14 @@ int 
Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c 0 ); + camera->setMonitor( monitors[i] ); Zone **zones = 0; int n_zones = Zone::Load( monitors[i], zones ); monitors[i]->AddZones( n_zones, zones ); monitors[i]->AddPrivacyBitmask( zones ); Debug( 1, "Loaded monitor %d(%s), %d zones", id, name.c_str(), n_zones ); } - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -2473,8 +2161,7 @@ int Monitor::LoadRemoteMonitors( const char *protocol, const char *host, const c return( n_monitors ); } -int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose purpose ) -{ +int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose purpose ) { std::string sql = "select Id, Name, ServerId, Function+0, Enabled, LinkedMonitors, Path, Width, Height, Colours, Palette, Orientation+0, Deinterlacing, SaveJPEGs, VideoWriter, EncoderParameters, RecordAudio, Brightness, Contrast, Hue, Colour, EventPrefix, LabelFormat, LabelX, LabelY, LabelSize, ImageBufferCount, WarmupCount, PreEventCount, PostEventCount, StreamReplayBuffer, AlarmFrameCount, SectionLength, FrameSkip, MotionFrameSkip, AnalysisFPS, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS, FPSReportInterval, RefBlendPerc, AlarmRefBlendPerc, TrackMotion, Exif from Monitors where Function != 'None' and Type = 'File'"; if ( file[0] ) { sql += " AND Path='"; @@ -2486,8 +2173,7 @@ int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose pu } Debug( 1, "Loading File Monitors with %s", sql.c_str() ); MYSQL_RES *result = zmDbFetch( sql.c_str() ); - if ( !result ) - { + if ( !result ) { Error( "Can't use query result: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -2495,8 +2181,7 @@ int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose pu Debug( 1, "Got %d monitors", n_monitors ); delete[] monitors; monitors = new 
Monitor *[n_monitors]; - for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) - { + for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) { int col = 0; int id = atoi(dbrow[col]); col++; @@ -2516,7 +2201,7 @@ int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose pu unsigned int deinterlacing = atoi(dbrow[col]); col++; int savejpegs = atoi(dbrow[col]); col++; - int videowriter = atoi(dbrow[col]); col++; + VideoWriter videowriter = (VideoWriter)atoi(dbrow[col]); col++; std::string encoderparams = dbrow[col]; col++; bool record_audio = (*dbrow[col] != '0'); col++; @@ -2606,14 +2291,14 @@ int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose pu 0, 0 ); + camera->setMonitor( monitors[i] ); Zone **zones = 0; int n_zones = Zone::Load( monitors[i], zones ); monitors[i]->AddZones( n_zones, zones ); monitors[i]->AddPrivacyBitmask( zones ); Debug( 1, "Loaded monitor %d(%s), %d zones", id, name, n_zones ); } - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -2624,8 +2309,7 @@ int Monitor::LoadFileMonitors( const char *file, Monitor **&monitors, Purpose pu } #if HAVE_LIBAVFORMAT -int Monitor::LoadFfmpegMonitors( const char *file, Monitor **&monitors, Purpose purpose ) -{ +int Monitor::LoadFfmpegMonitors( const char *file, Monitor **&monitors, Purpose purpose ) { std::string sql = "select Id, Name, ServerId, Function+0, Enabled, LinkedMonitors, Path, Method, Options, Width, Height, Colours, Palette, Orientation+0, Deinterlacing, SaveJPEGs, VideoWriter, EncoderParameters, RecordAudio, Brightness, Contrast, Hue, Colour, EventPrefix, LabelFormat, LabelX, LabelY, LabelSize, ImageBufferCount, WarmupCount, PreEventCount, PostEventCount, StreamReplayBuffer, AlarmFrameCount, SectionLength, FrameSkip, MotionFrameSkip, AnalysisFPS, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS, FPSReportInterval, 
RefBlendPerc, AlarmRefBlendPerc, TrackMotion, Exif from Monitors where Function != 'None' and Type = 'Ffmpeg'"; if ( file[0] ) { sql += " AND Path = '"; @@ -2646,8 +2330,7 @@ int Monitor::LoadFfmpegMonitors( const char *file, Monitor **&monitors, Purpose Debug( 1, "Got %d monitors", n_monitors ); delete[] monitors; monitors = new Monitor *[n_monitors]; - for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) - { + for( int i = 0; MYSQL_ROW dbrow = mysql_fetch_row( result ); i++ ) { int col = 0; int id = atoi(dbrow[col]); col++; @@ -2669,7 +2352,7 @@ int Monitor::LoadFfmpegMonitors( const char *file, Monitor **&monitors, Purpose unsigned int deinterlacing = atoi(dbrow[col]); col++; int savejpegs = atoi(dbrow[col]); col++; - int videowriter = atoi(dbrow[col]); col++; + VideoWriter videowriter = (VideoWriter)atoi(dbrow[col]); col++; std::string encoderparams = dbrow[col]; col++; bool record_audio = (*dbrow[col] != '0'); col++; @@ -2761,14 +2444,14 @@ int Monitor::LoadFfmpegMonitors( const char *file, Monitor **&monitors, Purpose 0, 0 ); + camera->setMonitor( monitors[i] ); Zone **zones = 0; int n_zones = Zone::Load( monitors[i], zones ); monitors[i]->AddZones( n_zones, zones ); monitors[i]->AddPrivacyBitmask( zones ); Debug( 1, "Loaded monitor %d(%s), %d zones", id, name, n_zones ); } - if ( mysql_errno( &dbconn ) ) - { + if ( mysql_errno( &dbconn ) ) { Error( "Can't fetch row: %s", mysql_error( &dbconn ) ); exit( mysql_errno( &dbconn ) ); } @@ -2779,8 +2462,7 @@ int Monitor::LoadFfmpegMonitors( const char *file, Monitor **&monitors, Purpose } #endif // HAVE_LIBAVFORMAT -Monitor *Monitor::Load( unsigned int p_id, bool load_zones, Purpose purpose ) -{ +Monitor *Monitor::Load( unsigned int p_id, bool load_zones, Purpose purpose ) { std::string sql = stringtf( "select Id, Name, ServerId, Type, Function+0, Enabled, LinkedMonitors, Device, Channel, Format, V4LMultiBuffer, V4LCapturesPerFrame, Protocol, Method, Host, Port, Path, Options, User, Pass, Width, 
Height, Colours, Palette, Orientation+0, Deinterlacing, RTSPDescribe, SaveJPEGs, VideoWriter, EncoderParameters, RecordAudio, Brightness, Contrast, Hue, Colour, EventPrefix, LabelFormat, LabelX, LabelY, LabelSize, ImageBufferCount, WarmupCount, PreEventCount, PostEventCount, StreamReplayBuffer, AlarmFrameCount, SectionLength, FrameSkip, MotionFrameSkip, AnalysisFPS, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS, FPSReportInterval, RefBlendPerc, AlarmRefBlendPerc, TrackMotion, SignalCheckColour, Exif from Monitors where Id = %d", p_id ); MYSQL_ROW dbrow = zmDbFetchOne( sql.c_str() ); @@ -2839,7 +2521,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); unsigned int deinterlacing = atoi(dbrow[col]); col++; bool rtsp_describe = (*dbrow[col] != '0'); col++; int savejpegs = atoi(dbrow[col]); col++; - int videowriter = atoi(dbrow[col]); col++; + VideoWriter videowriter = (VideoWriter)atoi(dbrow[col]); col++; std::string encoderparams = dbrow[col]; col++; bool record_audio = (*dbrow[col] != '0'); col++; @@ -2884,8 +2566,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); int extras = (deinterlacing>>24)&0xff; Camera *camera = 0; - if ( type == "Local" ) - { + if ( type == "Local" ) { #if ZM_HAS_V4L camera = new LocalCamera( id, @@ -2910,11 +2591,8 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); #else // ZM_HAS_V4L Fatal( "You must have video4linux libraries and headers installed to use local analog or USB cameras for monitor %d", id ); #endif // ZM_HAS_V4L - } - else if ( type == "Remote" ) - { - if ( protocol == "http" ) - { + } else if ( type == "Remote" ) { + if ( protocol == "http" ) { camera = new RemoteCameraHttp( id, method.c_str(), @@ -2931,9 +2609,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); purpose==CAPTURE, record_audio ); - } - else if ( protocol == "rtsp" ) - { + } else if ( protocol == "rtsp" ) { #if HAVE_LIBAVFORMAT camera = new RemoteCameraRtsp( id, 
@@ -2955,14 +2631,10 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); #else // HAVE_LIBAVFORMAT Fatal( "You must have ffmpeg libraries installed to use remote camera protocol '%s' for monitor %d", protocol.c_str(), id ); #endif // HAVE_LIBAVFORMAT - } - else - { + } else { Fatal( "Unexpected remote camera protocol '%s' for monitor %d", protocol.c_str(), id ); } - } - else if ( type == "File" ) - { + } else if ( type == "File" ) { camera = new FileCamera( id, path.c_str(), @@ -2976,9 +2648,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); purpose==CAPTURE, record_audio ); - } - else if ( type == "Ffmpeg" ) - { + } else if ( type == "Ffmpeg" ) { #if HAVE_LIBAVFORMAT camera = new FfmpegCamera( id, @@ -2998,9 +2668,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); #else // HAVE_LIBAVFORMAT Fatal( "You must have ffmpeg libraries installed to use ffmpeg cameras for monitor %d", id ); #endif // HAVE_LIBAVFORMAT - } - else if (type == "Libvlc") - { + } else if (type == "Libvlc") { #if HAVE_LIBVLC camera = new LibvlcCamera( id, @@ -3020,9 +2688,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); #else // HAVE_LIBVLC Fatal( "You must have vlc libraries installed to use vlc cameras for monitor %d", id ); #endif // HAVE_LIBVLC - } - else if ( type == "cURL" ) - { + } else if ( type == "cURL" ) { #if HAVE_LIBCURL camera = new cURLCamera( id, @@ -3042,9 +2708,7 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); #else // HAVE_LIBCURL Fatal( "You must have libcurl installed to use ffmpeg cameras for monitor %d", id ); #endif // HAVE_LIBCURL - } - else - { + } else { Fatal( "Bogus monitor type '%s' for monitor %d", type.c_str(), id ); } monitor = new Monitor( @@ -3090,9 +2754,9 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); ); + camera->setMonitor( monitor ); int n_zones = 0; - if ( load_zones ) - { + if ( load_zones ) { 
Zone **zones = 0; n_zones = Zone::Load( monitor, zones ); monitor->AddZones( n_zones, zones ); @@ -3102,15 +2766,19 @@ Debug( 1, "Got %d for v4l_captures_per_frame", v4l_captures_per_frame ); return( monitor ); } -int Monitor::Capture() -{ - static int FirstCapture = 1; +/* Returns 0 on success, even if no new images are available (transient error) + * Returns -1 on failure. + */ +int Monitor::Capture() { + static int FirstCapture = 1; // Used in de-interlacing to indicate whether this is the even or odd image int captureResult; - int index = image_count%image_buffer_count; + unsigned int index = image_count%image_buffer_count; Image* capture_image = image_buffer[index].image; - if ( (deinterlacing & 0xff) == 4) { + unsigned int deinterlacing_value = deinterlacing & 0xff; + + if ( deinterlacing_value == 4 ) { if ( FirstCapture != 1 ) { /* Copy the next image into the shared memory */ capture_image->CopyBuffer(*(next_buffer.image)); @@ -3119,9 +2787,9 @@ int Monitor::Capture() /* Capture a new next image */ //Check if FFMPEG camera - if((GetOptVideoWriter() == 2) && camera->SupportsNativeVideo()){ + if ( ( videowriter == H264PASSTHROUGH ) && camera->SupportsNativeVideo() ) { captureResult = camera->CaptureAndRecord(*(next_buffer.image), video_store_data->recording, video_store_data->event_file); - }else{ + } else { captureResult = camera->Capture(*(next_buffer.image)); } @@ -3132,7 +2800,7 @@ int Monitor::Capture() } else { //Check if FFMPEG camera - if((GetOptVideoWriter() == 2) && camera->SupportsNativeVideo()){ + if ( (videowriter == H264PASSTHROUGH ) && camera->SupportsNativeVideo() ) { //Warning("ZMC: Recording: %d", video_store_data->recording); captureResult = camera->CaptureAndRecord(*capture_image, video_store_data->recording, video_store_data->event_file); }else{ @@ -3141,14 +2809,13 @@ int Monitor::Capture() } } - if((GetOptVideoWriter() == 2) && captureResult > 0) - { + // CaptureAndRecord returns # of frames captured I think + if ( ( videowriter == 
H264PASSTHROUGH ) && ( captureResult > 0 ) ) { //video_store_data->frameNumber = captureResult; captureResult = 0; } - if ( captureResult != 0 ) - { + if ( captureResult != 0 ) { // Unable to capture image for temporary reason // Fake a signal loss image Rgb signalcolor; @@ -3159,27 +2826,24 @@ int Monitor::Capture() captureResult = 1; } - if ( captureResult == 1 ) - { + if ( captureResult == 1 ) { /* Deinterlacing */ - if ( (deinterlacing & 0xff) == 1 ) { + if ( deinterlacing_value == 1 ) { capture_image->Deinterlace_Discard(); - } else if ( (deinterlacing & 0xff) == 2 ) { + } else if ( deinterlacing_value == 2 ) { capture_image->Deinterlace_Linear(); - } else if ( (deinterlacing & 0xff) == 3 ) { + } else if ( deinterlacing_value == 3 ) { capture_image->Deinterlace_Blend(); - } else if ( (deinterlacing & 0xff) == 4 ) { + } else if ( deinterlacing_value == 4 ) { capture_image->Deinterlace_4Field( next_buffer.image, (deinterlacing>>8)&0xff ); - } else if ( (deinterlacing & 0xff) == 5 ) { + } else if ( deinterlacing_value == 5 ) { capture_image->Deinterlace_Blend_CustomRatio( (deinterlacing>>8)&0xff ); } - if ( orientation != ROTATE_0 ) - { - switch ( orientation ) - { + if ( orientation != ROTATE_0 ) { + switch ( orientation ) { case ROTATE_0 : { // No action required @@ -3200,25 +2864,19 @@ int Monitor::Capture() } } } - } // end if captureResults == 1 - // if true? let's get rid of this. 
- if ( true ) { - if ( capture_image->Size() > camera->ImageSize() ) - { + if ( capture_image->Size() > camera->ImageSize() ) { Error( "Captured image %d does not match expected size %d check width, height and colour depth",capture_image->Size(),camera->ImageSize() ); return( -1 ); } - if ( ((unsigned int)index == shared_data->last_read_index) && (function > MONITOR) ) - { + if ( (index == shared_data->last_read_index) && (function > MONITOR) ) { Warning( "Buffer overrun at index %d, image %d, slow down capture, speed up analysis or increase ring buffer size", index, image_count ); time_t now = time(0); double approxFps = double(image_buffer_count)/double(now-image_buffer[index].timestamp->tv_sec); time_t last_read_delta = now - shared_data->last_read_time; - if ( last_read_delta > (image_buffer_count/approxFps) ) - { + if ( last_read_delta > (image_buffer_count/approxFps) ) { Warning( "Last image read from shared memory %ld seconds ago, zma may have gone away", last_read_delta ) shared_data->last_read_index = image_buffer_count; } @@ -3228,8 +2886,7 @@ int Monitor::Capture() capture_image->MaskPrivacy( privacy_bitmask ); gettimeofday( image_buffer[index].timestamp, NULL ); - if ( config.timestamp_on_capture ) - { + if ( config.timestamp_on_capture ) { TimestampImage( capture_image, image_buffer[index].timestamp ); } shared_data->signal = CheckSignal(capture_image); @@ -3238,8 +2895,7 @@ int Monitor::Capture() image_count++; - if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) - { + if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) { time_t now = image_buffer[index].timestamp->tv_sec; fps = double(fps_report_interval)/(now-last_fps_time); //Info( "%d -> %d -> %d", fps_report_interval, now, last_fps_time ); @@ -3248,16 +2904,14 @@ int Monitor::Capture() last_fps_time = now; } - if ( shared_data->action & GET_SETTINGS ) - { + if ( shared_data->action & GET_SETTINGS ) { shared_data->brightness = 
camera->Brightness(); shared_data->hue = camera->Hue(); shared_data->colour = camera->Colour(); shared_data->contrast = camera->Contrast(); shared_data->action &= ~GET_SETTINGS; } - if ( shared_data->action & SET_SETTINGS ) - { + if ( shared_data->action & SET_SETTINGS ) { camera->Brightness( shared_data->brightness ); camera->Hue( shared_data->hue ); camera->Colour( shared_data->colour ); @@ -3265,15 +2919,13 @@ int Monitor::Capture() shared_data->action &= ~SET_SETTINGS; } return( 0 ); - } + } // end if captureResults == 1 shared_data->signal = false; return( -1 ); } -void Monitor::TimestampImage( Image *ts_image, const struct timeval *ts_time ) const -{ - if ( label_format[0] ) - { +void Monitor::TimestampImage( Image *ts_image, const struct timeval *ts_time ) const { + if ( label_format[0] ) { // Expand the strftime macros first char label_time_text[256]; strftime( label_time_text, sizeof(label_time_text), label_format, localtime( &ts_time->tv_sec ) ); @@ -3281,13 +2933,10 @@ void Monitor::TimestampImage( Image *ts_image, const struct timeval *ts_time ) c char label_text[1024]; const char *s_ptr = label_time_text; char *d_ptr = label_text; - while ( *s_ptr && ((d_ptr-label_text) < (unsigned int)sizeof(label_text)) ) - { - if ( *s_ptr == '%' ) - { + while ( *s_ptr && ((d_ptr-label_text) < (unsigned int)sizeof(label_text)) ) { + if ( *s_ptr == '%' ) { bool found_macro = false; - switch ( *(s_ptr+1) ) - { + switch ( *(s_ptr+1) ) { case 'N' : d_ptr += snprintf( d_ptr, sizeof(label_text)-(d_ptr-label_text), "%s", name ); found_macro = true; @@ -3301,8 +2950,7 @@ void Monitor::TimestampImage( Image *ts_image, const struct timeval *ts_time ) c found_macro = true; break; } - if ( found_macro ) - { + if ( found_macro ) { s_ptr += 2; continue; } @@ -3315,32 +2963,27 @@ void Monitor::TimestampImage( Image *ts_image, const struct timeval *ts_time ) c } bool Monitor::closeEvent() { - if (event) - { - if ( function == RECORD || function == MOCORD ) - { + if ( event ) { + if 
( function == RECORD || function == MOCORD ) { gettimeofday( &(event->EndTime()), NULL ); } delete event; - video_store_data->recording = false; + video_store_data->recording = (struct timeval){0};; event = 0; return( true ); } return( false ); } -unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &zoneSet ) -{ +unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &zoneSet ) { bool alarm = false; unsigned int score = 0; if ( n_zones <= 0 ) return( alarm ); - if ( config.record_diag_images ) - { + if ( config.record_diag_images ) { static char diag_path[PATH_MAX] = ""; - if ( !diag_path[0] ) - { + if ( !diag_path[0] ) { snprintf( diag_path, sizeof(diag_path), "%s/%d/diag-r.jpg", config.dir_events, id ); } ref_image.WriteJpeg( diag_path ); @@ -3348,25 +2991,21 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z ref_image.Delta( comp_image, &delta_image); - if ( config.record_diag_images ) - { + if ( config.record_diag_images ) { static char diag_path[PATH_MAX] = ""; - if ( !diag_path[0] ) - { + if ( !diag_path[0] ) { snprintf( diag_path, sizeof(diag_path), "%s/%d/diag-d.jpg", config.dir_events, id ); } delta_image.WriteJpeg( diag_path ); } // Blank out all exclusion zones - for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) - { + for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) { Zone *zone = zones[n_zone]; // need previous alarmed state for preclusive zone, so don't clear just yet if (!zone->IsPreclusive()) zone->ClearAlarm(); - if ( !zone->IsInactive() ) - { + if ( !zone->IsInactive() ) { continue; } Debug( 3, "Blanking inactive zone %s", zone->Label() ); @@ -3374,18 +3013,15 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z } // Check preclusive zones first - for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) - { + for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) { Zone *zone = zones[n_zone]; - if ( !zone->IsPreclusive() ) - { + if ( 
!zone->IsPreclusive() ) { continue; } int old_zone_score = zone->Score(); bool old_zone_alarmed = zone->Alarmed(); Debug( 3, "Checking preclusive zone %s - old score: %d, state: %s", zone->Label(),old_zone_score, zone->Alarmed()?"alarmed":"quiet" ); - if ( zone->CheckAlarms( &delta_image ) ) - { + if ( zone->CheckAlarms( &delta_image ) ) { alarm = true; score += zone->Score(); zone->SetAlarm(); @@ -3401,7 +3037,7 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z } if (zone->CheckExtendAlarmCount()) { alarm=true; - zone->SetAlarm(); + zone->SetAlarm(); } else { zone->ClearAlarm(); } @@ -3412,33 +3048,25 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z Coord alarm_centre; int top_score = -1; - if ( alarm ) - { + if ( alarm ) { alarm = false; score = 0; - } - else - { + } else { // Find all alarm pixels in active zones - for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) - { + for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) { Zone *zone = zones[n_zone]; - if ( !zone->IsActive() || zone->IsPreclusive()) - { + if ( !zone->IsActive() || zone->IsPreclusive()) { continue; } Debug( 3, "Checking active zone %s", zone->Label() ); - if ( zone->CheckAlarms( &delta_image ) ) - { + if ( zone->CheckAlarms( &delta_image ) ) { alarm = true; score += zone->Score(); zone->SetAlarm(); Debug( 3, "Zone is alarmed, zone score = %d", zone->Score() ); zoneSet.insert( zone->Label() ); - if ( config.opt_control && track_motion ) - { - if ( (int)zone->Score() > top_score ) - { + if ( config.opt_control && track_motion ) { + if ( (int)zone->Score() > top_score ) { top_score = zone->Score(); alarm_centre = zone->GetAlarmCentre(); } @@ -3446,47 +3074,36 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z } } - if ( alarm ) - { - for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) - { + if ( alarm ) { + for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) { Zone *zone = zones[n_zone]; - if ( 
!zone->IsInclusive() ) - { + if ( !zone->IsInclusive() ) { continue; } Debug( 3, "Checking inclusive zone %s", zone->Label() ); - if ( zone->CheckAlarms( &delta_image ) ) - { + if ( zone->CheckAlarms( &delta_image ) ) { alarm = true; score += zone->Score(); zone->SetAlarm(); Debug( 3, "Zone is alarmed, zone score = %d", zone->Score() ); zoneSet.insert( zone->Label() ); - if ( config.opt_control && track_motion ) - { - if ( zone->Score() > (unsigned int)top_score ) - { + if ( config.opt_control && track_motion ) { + if ( zone->Score() > (unsigned int)top_score ) { top_score = zone->Score(); alarm_centre = zone->GetAlarmCentre(); } } } } - } - else - { + } else { // Find all alarm pixels in exclusive zones - for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) - { + for ( int n_zone = 0; n_zone < n_zones; n_zone++ ) { Zone *zone = zones[n_zone]; - if ( !zone->IsExclusive() ) - { + if ( !zone->IsExclusive() ) { continue; } Debug( 3, "Checking exclusive zone %s", zone->Label() ); - if ( zone->CheckAlarms( &delta_image ) ) - { + if ( zone->CheckAlarms( &delta_image ) ) { alarm = true; score += zone->Score(); zone->SetAlarm(); @@ -3497,15 +3114,12 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z } } - if ( top_score > 0 ) - { + if ( top_score > 0 ) { shared_data->alarm_x = alarm_centre.X(); shared_data->alarm_y = alarm_centre.Y(); Info( "Got alarm centre at %d,%d, at count %d", shared_data->alarm_x, shared_data->alarm_y, image_count ); - } - else - { + } else { shared_data->alarm_x = shared_data->alarm_y = -1; } @@ -3513,44 +3127,36 @@ unsigned int Monitor::DetectMotion( const Image &comp_image, Event::StringSet &z return( score?score:alarm ); } -bool Monitor::DumpSettings( char *output, bool verbose ) -{ +bool Monitor::DumpSettings( char *output, bool verbose ) { output[0] = 0; sprintf( output+strlen(output), "Id : %d\n", id ); sprintf( output+strlen(output), "Name : %s\n", name ); sprintf( output+strlen(output), "Type : %s\n", 
camera->IsLocal()?"Local":(camera->IsRemote()?"Remote":"File") ); #if ZM_HAS_V4L - if ( camera->IsLocal() ) - { + if ( camera->IsLocal() ) { sprintf( output+strlen(output), "Device : %s\n", ((LocalCamera *)camera)->Device().c_str() ); sprintf( output+strlen(output), "Channel : %d\n", ((LocalCamera *)camera)->Channel() ); sprintf( output+strlen(output), "Standard : %d\n", ((LocalCamera *)camera)->Standard() ); - } - else + } else #endif // ZM_HAS_V4L - if ( camera->IsRemote() ) - { + if ( camera->IsRemote() ) { sprintf( output+strlen(output), "Protocol : %s\n", ((RemoteCamera *)camera)->Protocol().c_str() ); sprintf( output+strlen(output), "Host : %s\n", ((RemoteCamera *)camera)->Host().c_str() ); sprintf( output+strlen(output), "Port : %s\n", ((RemoteCamera *)camera)->Port().c_str() ); sprintf( output+strlen(output), "Path : %s\n", ((RemoteCamera *)camera)->Path().c_str() ); - } - else if ( camera->IsFile() ) - { + } else if ( camera->IsFile() ) { sprintf( output+strlen(output), "Path : %s\n", ((FileCamera *)camera)->Path() ); } #if HAVE_LIBAVFORMAT - else if ( camera->IsFfmpeg() ) - { + else if ( camera->IsFfmpeg() ) { sprintf( output+strlen(output), "Path : %s\n", ((FfmpegCamera *)camera)->Path().c_str() ); } #endif // HAVE_LIBAVFORMAT sprintf( output+strlen(output), "Width : %d\n", camera->Width() ); sprintf( output+strlen(output), "Height : %d\n", camera->Height() ); #if ZM_HAS_V4L - if ( camera->IsLocal() ) - { + if ( camera->IsLocal() ) { sprintf( output+strlen(output), "Palette : %d\n", ((LocalCamera *)camera)->Palette() ); } #endif // ZM_HAS_V4L @@ -3581,78 +3187,61 @@ bool Monitor::DumpSettings( char *output, bool verbose ) function==NODECT?"Externally Triggered only, no Motion Detection":"Unknown" )))))); sprintf( output+strlen(output), "Zones : %d\n", n_zones ); - for ( int i = 0; i < n_zones; i++ ) - { + for ( int i = 0; i < n_zones; i++ ) { zones[i]->DumpSettings( output+strlen(output), verbose ); } return( true ); } // bool Monitor::DumpSettings( char 
*output, bool verbose ) -bool MonitorStream::checkSwapPath( const char *path, bool create_path ) -{ +bool MonitorStream::checkSwapPath( const char *path, bool create_path ) { uid_t uid = getuid(); gid_t gid = getgid(); struct stat stat_buf; - if ( stat( path, &stat_buf ) < 0 ) - { - if ( create_path && errno == ENOENT ) - { + if ( stat( path, &stat_buf ) < 0 ) { + if ( create_path && errno == ENOENT ) { Debug( 3, "Swap path '%s' missing, creating", path ); - if ( mkdir( path, 0755 ) ) - { + if ( mkdir( path, 0755 ) ) { Error( "Can't mkdir %s: %s", path, strerror(errno)); return( false ); } - if ( stat( path, &stat_buf ) < 0 ) - { + if ( stat( path, &stat_buf ) < 0 ) { Error( "Can't stat '%s': %s", path, strerror(errno) ); return( false ); } - } - else - { + } else { Error( "Can't stat '%s': %s", path, strerror(errno) ); return( false ); } } - if ( !S_ISDIR(stat_buf.st_mode) ) - { + if ( !S_ISDIR(stat_buf.st_mode) ) { Error( "Swap image path '%s' is not a directory", path ); return( false ); } mode_t mask = 0; - if ( uid == stat_buf.st_uid ) - { + if ( uid == stat_buf.st_uid ) { // If we are the owner mask = 00700; - } - else if ( gid == stat_buf.st_gid ) - { + } else if ( gid == stat_buf.st_gid ) { // If we are in the owner group mask = 00070; - } - else - { + } else { // We are neither the owner nor in the group mask = 00007; } - if ( (stat_buf.st_mode & mask) != mask ) - { + if ( (stat_buf.st_mode & mask) != mask ) { Error( "Insufficient permissions on swap image path '%s'", path ); return( false ); } return( true ); } -void MonitorStream::processCommand( const CmdMsg *msg ) -{ +void MonitorStream::processCommand( const CmdMsg *msg ) { Debug( 2, "Got message, type %d, msg %d", msg->msg_type, msg->msg_data[0] ); // Check for incoming command - switch( (MsgCommand)msg->msg_data[0] ) - { + switch( (MsgCommand)msg->msg_data[0] ) { case CMD_PAUSE : { Debug( 1, "Got PAUSE command" ); @@ -3914,8 +3503,7 @@ void MonitorStream::processCommand( const CmdMsg *msg ) 
status_msg.msg_type = MSG_DATA_WATCH; memcpy( &status_msg.msg_data, &status_data, sizeof(status_data) ); int nbytes = 0; - if ( (nbytes = sendto( sd, &status_msg, sizeof(status_msg), MSG_DONTWAIT, (sockaddr *)&rem_addr, sizeof(rem_addr) )) < 0 ) - { + if ( (nbytes = sendto( sd, &status_msg, sizeof(status_msg), MSG_DONTWAIT, (sockaddr *)&rem_addr, sizeof(rem_addr) )) < 0 ) { //if ( errno != EAGAIN ) { Error( "Can't sendto on sd %d: %s", sd, strerror(errno) ); @@ -3930,8 +3518,7 @@ void MonitorStream::processCommand( const CmdMsg *msg ) updateFrameRate( monitor->GetFPS() ); } -bool MonitorStream::sendFrame( const char *filepath, struct timeval *timestamp ) -{ +bool MonitorStream::sendFrame( const char *filepath, struct timeval *timestamp ) { bool send_raw = ((scale>=ZM_SCALE_BASE)&&(zoom==ZM_SCALE_BASE)); if ( type != STREAM_JPEG ) @@ -3939,25 +3526,19 @@ bool MonitorStream::sendFrame( const char *filepath, struct timeval *timestamp ) if ( !config.timestamp_on_capture && timestamp ) send_raw = false; - if ( !send_raw ) - { + if ( !send_raw ) { Image temp_image( filepath ); return( sendFrame( &temp_image, timestamp ) ); - } - else - { + } else { int img_buffer_size = 0; static unsigned char img_buffer[ZM_MAX_IMAGE_SIZE]; FILE *fdj = NULL; - if ( (fdj = fopen( filepath, "r" )) ) - { + if ( (fdj = fopen( filepath, "r" )) ) { img_buffer_size = fread( img_buffer, 1, sizeof(img_buffer), fdj ); fclose( fdj ); - } - else - { + } else { Error( "Can't open %s: %s", filepath, strerror(errno) ); return( false ); } @@ -3969,8 +3550,7 @@ bool MonitorStream::sendFrame( const char *filepath, struct timeval *timestamp ) fprintf( stdout, "--ZoneMinderFrame\r\n" ); fprintf( stdout, "Content-Length: %d\r\n", img_buffer_size ); fprintf( stdout, "Content-Type: image/jpeg\r\n\r\n" ); - if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) - { + if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) { if ( !zm_terminate ) Error( "Unable to send stream frame: %s", 
strerror(errno) ); return( false ); @@ -3982,8 +3562,7 @@ bool MonitorStream::sendFrame( const char *filepath, struct timeval *timestamp ) gettimeofday( &frameEndTime, NULL ); int frameSendTime = tvDiffMsec( frameStartTime, frameEndTime ); - if ( frameSendTime > 1000/maxfps ) - { + if ( frameSendTime > 1000/maxfps ) { maxfps /= 2; Error( "Frame send time %d msec too slow, throttling maxfps to %.2f", frameSendTime, maxfps ); } @@ -3995,17 +3574,14 @@ bool MonitorStream::sendFrame( const char *filepath, struct timeval *timestamp ) return( false ); } -bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) -{ +bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) { Image *send_image = prepareImage( image ); if ( !config.timestamp_on_capture && timestamp ) monitor->TimestampImage( send_image, timestamp ); #if HAVE_LIBAVCODEC - if ( type == STREAM_MPEG ) - { - if ( !vid_stream ) - { + if ( type == STREAM_MPEG ) { + if ( !vid_stream ) { vid_stream = new VideoStream( "pipe:", format, bitrate, effective_fps, send_image->Colours(), send_image->SubpixelOrder(), send_image->Width(), send_image->Height() ); fprintf( stdout, "Content-type: %s\r\n\r\n", vid_stream->MimeType() ); vid_stream->OpenStream(); @@ -4016,8 +3592,7 @@ bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) base_time = *timestamp; DELTA_TIMEVAL( delta_time, *timestamp, base_time, DT_PREC_3 ); /* double pts = */ vid_stream->EncodeFrame( send_image->Buffer(), send_image->Size(), config.mpeg_timed_frames, delta_time.delta ); - } - else + } else #endif // HAVE_LIBAVCODEC { static unsigned char temp_img_buffer[ZM_MAX_IMAGE_SIZE]; @@ -4030,8 +3605,7 @@ bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) gettimeofday( &frameStartTime, NULL ); fprintf( stdout, "--ZoneMinderFrame\r\n" ); - switch( type ) - { + switch( type ) { case STREAM_JPEG : send_image->EncodeJpeg( img_buffer, &img_buffer_size ); fprintf( stdout, "Content-Type: 
image/jpeg\r\n" ); @@ -4052,8 +3626,7 @@ bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) break; } fprintf( stdout, "Content-Length: %d\r\n\r\n", img_buffer_size ); - if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) - { + if ( fwrite( img_buffer, img_buffer_size, 1, stdout ) != 1 ) { if ( !zm_terminate ) Error( "Unable to send stream frame: %s", strerror(errno) ); return( false ); @@ -4065,8 +3638,7 @@ bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) gettimeofday( &frameEndTime, NULL ); int frameSendTime = tvDiffMsec( frameStartTime, frameEndTime ); - if ( frameSendTime > 1000/maxfps ) - { + if ( frameSendTime > 1000/maxfps ) { maxfps /= 1.5; Error( "Frame send time %d msec too slow, throttling maxfps to %.2f", frameSendTime, maxfps ); } @@ -4075,10 +3647,8 @@ bool MonitorStream::sendFrame( Image *image, struct timeval *timestamp ) return( true ); } -void MonitorStream::runStream() -{ - if ( type == STREAM_SINGLE ) - { +void MonitorStream::runStream() { + if ( type == STREAM_SINGLE ) { // Not yet migrated over to stream class monitor->SingleImage( scale ); return; @@ -4155,28 +3725,23 @@ void MonitorStream::runStream() } float max_secs_since_last_sent_frame = 10.0; //should be > keep alive amount (5 secs) - while ( !zm_terminate ) - { + while ( !zm_terminate ) { bool got_command = false; - if ( feof( stdout ) || ferror( stdout ) || !monitor->ShmValid() ) - { + if ( feof( stdout ) || ferror( stdout ) || !monitor->ShmValid() ) { break; } gettimeofday( &now, NULL ); - if ( connkey ) - { + if ( connkey ) { while(checkCommandQueue()) { got_command = true; } } //bool frame_sent = false; - if ( buffered_playback && delayed ) - { - if ( temp_read_index == temp_write_index ) - { + if ( buffered_playback && delayed ) { + if ( temp_read_index == temp_write_index ) { // Go back to live viewing Debug( 1, "Exceeded temporary streaming buffer" ); // Clear paused flag @@ -4184,34 +3749,26 @@ void 
MonitorStream::runStream() // Clear delayed_play flag delayed = false; replay_rate = ZM_RATE_BASE; - } - else - { - if ( !paused ) - { + } else { + if ( !paused ) { int temp_index = MOD_ADD( temp_read_index, 0, temp_image_buffer_count ); //Debug( 3, "tri: %d, ti: %d", temp_read_index, temp_index ); SwapImage *swap_image = &temp_image_buffer[temp_index]; - if ( !swap_image->valid ) - { + if ( !swap_image->valid ) { paused = true; delayed = true; temp_read_index = MOD_ADD( temp_read_index, (replay_rate>=0?-1:1), temp_image_buffer_count ); - } - else - { + } else { //Debug( 3, "siT: %f, lfT: %f", TV_2_FLOAT( swap_image->timestamp ), TV_2_FLOAT( last_frame_timestamp ) ); double expected_delta_time = ((TV_2_FLOAT( swap_image->timestamp ) - TV_2_FLOAT( last_frame_timestamp )) * ZM_RATE_BASE)/replay_rate; double actual_delta_time = TV_2_FLOAT( now ) - last_frame_sent; //Debug( 3, "eDT: %.3lf, aDT: %.3f, lFS:%.3f, NOW:%.3f", expected_delta_time, actual_delta_time, last_frame_sent, TV_2_FLOAT( now ) ); // If the next frame is due - if ( actual_delta_time > expected_delta_time ) - { + if ( actual_delta_time > expected_delta_time ) { //Debug( 2, "eDT: %.3lf, aDT: %.3f", expected_delta_time, actual_delta_time ); - if ( temp_index%frame_mod == 0 ) - { + if ( temp_index%frame_mod == 0 ) { Debug( 2, "Sending delayed frame %d", temp_index ); // Send the next frame if ( !sendFrame( temp_image_buffer[temp_index].file_name, &temp_image_buffer[temp_index].timestamp ) ) @@ -4222,9 +3779,7 @@ void MonitorStream::runStream() temp_read_index = MOD_ADD( temp_read_index, (replay_rate>0?1:-1), temp_image_buffer_count ); } } - } - else if ( step != 0 ) - { + } else if ( step != 0 ) { temp_read_index = MOD_ADD( temp_read_index, (step>0?1:-1), temp_image_buffer_count ); SwapImage *swap_image = &temp_image_buffer[temp_read_index]; @@ -4235,14 +3790,11 @@ void MonitorStream::runStream() memcpy( &last_frame_timestamp, &(swap_image->timestamp), sizeof(last_frame_timestamp) ); //frame_sent = true; 
step = 0; - } - else - { + } else { int temp_index = MOD_ADD( temp_read_index, 0, temp_image_buffer_count ); double actual_delta_time = TV_2_FLOAT( now ) - last_frame_sent; - if ( got_command || actual_delta_time > 5 ) - { + if ( got_command || actual_delta_time > 5 ) { // Send keepalive Debug( 2, "Sending keepalive frame %d", temp_index ); // Send the next frame @@ -4252,8 +3804,7 @@ void MonitorStream::runStream() } } } - if ( temp_read_index == temp_write_index ) - { + if ( temp_read_index == temp_write_index ) { // Go back to live viewing Warning( "Rewound over write index, resuming live play" ); // Clear paused flag @@ -4263,15 +3814,12 @@ void MonitorStream::runStream() replay_rate = ZM_RATE_BASE; } } - if ( (unsigned int)last_read_index != monitor->shared_data->last_write_index ) - { + if ( (unsigned int)last_read_index != monitor->shared_data->last_write_index ) { int index = monitor->shared_data->last_write_index%monitor->image_buffer_count; last_read_index = monitor->shared_data->last_write_index; //Debug( 1, "%d: %x - %x", index, image_buffer[index].image, image_buffer[index].image->buffer ); - if ( (frame_mod == 1) || ((frame_count%frame_mod) == 0) ) - { - if ( !paused && !delayed ) - { + if ( (frame_mod == 1) || ((frame_count%frame_mod) == 0) ) { + if ( !paused && !delayed ) { // Send the next frame Monitor::Snapshot *snap = &monitor->image_buffer[index]; @@ -4283,24 +3831,19 @@ void MonitorStream::runStream() temp_read_index = temp_write_index; } } - if ( buffered_playback ) - { - if ( monitor->shared_data->valid ) - { - if ( monitor->image_buffer[index].timestamp->tv_sec ) - { + if ( buffered_playback ) { + if ( monitor->shared_data->valid ) { + if ( monitor->image_buffer[index].timestamp->tv_sec ) { int temp_index = temp_write_index%temp_image_buffer_count; Debug( 2, "Storing frame %d", temp_index ); - if ( !temp_image_buffer[temp_index].valid ) - { + if ( !temp_image_buffer[temp_index].valid ) { snprintf( temp_image_buffer[temp_index].file_name, 
sizeof(temp_image_buffer[0].file_name), "%s/zmswap-i%05d.jpg", swap_path, temp_index ); temp_image_buffer[temp_index].valid = true; } memcpy( &(temp_image_buffer[temp_index].timestamp), monitor->image_buffer[index].timestamp, sizeof(temp_image_buffer[0].timestamp) ); monitor->image_buffer[index].image->WriteJpeg( temp_image_buffer[temp_index].file_name, config.jpeg_file_quality ); temp_write_index = MOD_ADD( temp_write_index, 1, temp_image_buffer_count ); - if ( temp_write_index == temp_read_index ) - { + if ( temp_write_index == temp_read_index ) { // Go back to live viewing Warning( "Exceeded temporary buffer, resuming live play" ); // Clear paused flag @@ -4309,79 +3852,56 @@ void MonitorStream::runStream() delayed = false; replay_rate = ZM_RATE_BASE; } - } - else - { + } else { Warning( "Unable to store frame as timestamp invalid" ); } - } - else - { + } else { Warning( "Unable to store frame as shared memory invalid" ); } } frame_count++; } usleep( (unsigned long)((1000000 * ZM_RATE_BASE)/((base_fps?base_fps:1)*abs(replay_rate*2))) ); - if ( ttl ) - { - if ( (now.tv_sec - stream_start_time) > ttl ) - { + if ( ttl ) { + if ( (now.tv_sec - stream_start_time) > ttl ) { break; } } - if ( (TV_2_FLOAT( now ) - last_frame_sent) > max_secs_since_last_sent_frame ) - { + if ( (TV_2_FLOAT( now ) - last_frame_sent) > max_secs_since_last_sent_frame ) { Error( "Terminating, last frame sent time %f secs more than maximum of %f", TV_2_FLOAT( now ) - last_frame_sent, max_secs_since_last_sent_frame ); break; } } - if ( buffered_playback ) - { + if ( buffered_playback ) { Debug( 1, "Cleaning swap files from %s", swap_path ); struct stat stat_buf; - if ( stat( swap_path, &stat_buf ) < 0 ) - { - if ( errno != ENOENT ) - { + if ( stat( swap_path, &stat_buf ) < 0 ) { + if ( errno != ENOENT ) { Error( "Can't stat '%s': %s", swap_path, strerror(errno) ); } - } - else if ( !S_ISDIR(stat_buf.st_mode) ) - { + } else if ( !S_ISDIR(stat_buf.st_mode) ) { Error( "Swap image path '%s' is not 
a directory", swap_path ); - } - else - { + } else { char glob_pattern[PATH_MAX] = ""; snprintf( glob_pattern, sizeof(glob_pattern), "%s/*.*", swap_path ); glob_t pglob; int glob_status = glob( glob_pattern, 0, 0, &pglob ); - if ( glob_status != 0 ) - { - if ( glob_status < 0 ) - { + if ( glob_status != 0 ) { + if ( glob_status < 0 ) { Error( "Can't glob '%s': %s", glob_pattern, strerror(errno) ); - } - else - { + } else { Debug( 1, "Can't glob '%s': %d", glob_pattern, glob_status ); } - } - else - { - for ( unsigned int i = 0; i < pglob.gl_pathc; i++ ) - { - if ( unlink( pglob.gl_pathv[i] ) < 0 ) - { + } else { + for ( unsigned int i = 0; i < pglob.gl_pathc; i++ ) { + if ( unlink( pglob.gl_pathv[i] ) < 0 ) { Error( "Can't unlink '%s': %s", pglob.gl_pathv[i], strerror(errno) ); } } } globfree( &pglob ); - if ( rmdir( swap_path ) < 0 ) - { + if ( rmdir( swap_path ) < 0 ) { Error( "Can't rmdir '%s': %s", swap_path, strerror(errno) ); } } @@ -4390,8 +3910,7 @@ void MonitorStream::runStream() closeComms(); } -void Monitor::SingleImage( int scale) -{ +void Monitor::SingleImage( int scale) { int img_buffer_size = 0; static JOCTET img_buffer[ZM_MAX_IMAGE_SIZE]; Image scaled_image; @@ -4399,14 +3918,12 @@ void Monitor::SingleImage( int scale) Snapshot *snap = &image_buffer[index]; Image *snap_image = snap->image; - if ( scale != ZM_SCALE_BASE ) - { + if ( scale != ZM_SCALE_BASE ) { scaled_image.Assign( *snap_image ); scaled_image.Scale( scale ); snap_image = &scaled_image; } - if ( !config.timestamp_on_capture ) - { + if ( !config.timestamp_on_capture ) { TimestampImage( snap_image, snap->timestamp ); } snap_image->EncodeJpeg( img_buffer, &img_buffer_size ); @@ -4416,21 +3933,18 @@ void Monitor::SingleImage( int scale) fwrite( img_buffer, img_buffer_size, 1, stdout ); } -void Monitor::SingleImageRaw( int scale) -{ +void Monitor::SingleImageRaw( int scale) { Image scaled_image; int index = shared_data->last_write_index%image_buffer_count; Snapshot *snap = 
&image_buffer[index]; Image *snap_image = snap->image; - if ( scale != ZM_SCALE_BASE ) - { + if ( scale != ZM_SCALE_BASE ) { scaled_image.Assign( *snap_image ); scaled_image.Scale( scale ); snap_image = &scaled_image; } - if ( !config.timestamp_on_capture ) - { + if ( !config.timestamp_on_capture ) { TimestampImage( snap_image, snap->timestamp ); } @@ -4439,8 +3953,7 @@ void Monitor::SingleImageRaw( int scale) fwrite( snap_image->Buffer(), snap_image->Size(), 1, stdout ); } -void Monitor::SingleImageZip( int scale) -{ +void Monitor::SingleImageZip( int scale) { unsigned long img_buffer_size = 0; static Bytef img_buffer[ZM_MAX_IMAGE_SIZE]; Image scaled_image; @@ -4448,14 +3961,12 @@ void Monitor::SingleImageZip( int scale) Snapshot *snap = &image_buffer[index]; Image *snap_image = snap->image; - if ( scale != ZM_SCALE_BASE ) - { + if ( scale != ZM_SCALE_BASE ) { scaled_image.Assign( *snap_image ); scaled_image.Scale( scale ); snap_image = &scaled_image; } - if ( !config.timestamp_on_capture ) - { + if ( !config.timestamp_on_capture ) { TimestampImage( snap_image, snap->timestamp ); } snap_image->Zip( img_buffer, &img_buffer_size ); diff --git a/src/zm_monitor.h b/src/zm_monitor.h index f17ab0977..bb58ae374 100644 --- a/src/zm_monitor.h +++ b/src/zm_monitor.h @@ -46,20 +46,17 @@ class Monitor; // This is the main class for monitors. Each monitor is associated // with a camera and is effectively a collector for events. 
// -class Monitor -{ -friend class MonitorStream; +class Monitor { + friend class MonitorStream; public: - typedef enum - { + typedef enum { QUERY=0, CAPTURE, ANALYSIS } Purpose; - typedef enum - { + typedef enum { NONE=1, MONITOR, MODECT, @@ -68,8 +65,7 @@ public: NODECT } Function; - typedef enum - { + typedef enum { ROTATE_0=1, ROTATE_90, ROTATE_180, @@ -78,8 +74,7 @@ public: FLIP_VERT } Orientation; - typedef enum - { + typedef enum { IDLE, PREALARM, ALARM, @@ -87,6 +82,12 @@ public: TAPE } State; + typedef enum { + DISABLED, + X264ENCODE, + H264PASSTHROUGH, + } VideoWriter; + protected: typedef std::set ZoneSet; @@ -95,8 +96,7 @@ protected: typedef enum { CLOSE_TIME, CLOSE_IDLE, CLOSE_ALARM } EventCloseMode; /* sizeof(SharedData) expected to be 336 bytes on 32bit and 64bit */ - typedef struct - { + typedef struct { uint32_t size; /* +0 */ uint32_t last_write_index; /* +4 */ uint32_t last_read_index; /* +8 */ @@ -121,12 +121,12 @@ protected: ** Shared memory layout should be identical for both 32bit and 64bit and is multiples of 16. 
*/ union { /* +64 */ - time_t last_write_time; - uint64_t extrapad1; + time_t last_write_time; + uint64_t extrapad1; }; union { /* +72 */ - time_t last_read_time; - uint64_t extrapad2; + time_t last_read_time; + uint64_t extrapad2; }; uint8_t control_state[256]; /* +80 */ @@ -135,8 +135,7 @@ protected: typedef enum { TRIGGER_CANCEL, TRIGGER_ON, TRIGGER_OFF } TriggerState; /* sizeof(TriggerData) expected to be 560 on 32bit & and 64bit */ - typedef struct - { + typedef struct { uint32_t size; uint32_t trigger_state; uint32_t trigger_score; @@ -147,28 +146,25 @@ protected: } TriggerData; /* sizeof(Snapshot) expected to be 16 bytes on 32bit and 32 bytes on 64bit */ - struct Snapshot - { + struct Snapshot { struct timeval *timestamp; Image *image; void* padding; }; - //TODO: Technically we can't exclude this struct when people don't have avformat as the Memory.pm module doesn't know about avformat + //TODO: Technically we can't exclude this struct when people don't have avformat as the Memory.pm module doesn't know about avformat #if 1 - //sizeOf(VideoStoreData) expected to be 4104 bytes on 32bit and 64bit - typedef struct - { - uint32_t size; - char event_file[4096]; - uint32_t recording; //bool arch dependent so use uint32 instead - //uint32_t frameNumber; - } VideoStoreData; + //sizeOf(VideoStoreData) expected to be 4104 bytes on 32bit and 64bit + typedef struct { + uint32_t size; + char event_file[4096]; + timeval recording; //bool arch dependent so use uint32 instead + //uint32_t frameNumber; + } VideoStoreData; #endif // HAVE_LIBAVFORMAT - class MonitorLink - { + class MonitorLink { protected: unsigned int id; char name[64]; @@ -196,21 +192,17 @@ protected: MonitorLink( int p_id, const char *p_name ); ~MonitorLink(); - inline int Id() const - { + inline int Id() const { return( id ); } - inline const char *Name() const - { + inline const char *Name() const { return( name ); } - inline bool isConnected() const - { + inline bool isConnected() const { return( 
connected ); } - inline time_t getLastConnectTime() const - { + inline time_t getLastConnectTime() const { return( last_connect_time ); } @@ -237,7 +229,7 @@ protected: unsigned int deinterlacing; int savejpegspref; - int videowriterpref; + VideoWriter videowriter; std::string encoderparams; std::vector encoderparamsvec; bool record_audio; // Whether to store the audio that we receive @@ -271,7 +263,7 @@ protected: int alarm_ref_blend_perc; // Percentage of new image going into reference image during alarm. bool track_motion; // Whether this monitor tries to track detected motion Rgb signal_check_colour; // The colour that the camera will emit when no video signal detected - bool embed_exif; // Whether to embed Exif data into each image frame or not + bool embed_exif; // Whether to embed Exif data into each image frame or not double fps; Image delta_image; @@ -291,7 +283,7 @@ protected: time_t start_time; time_t last_fps_time; time_t auto_resume_time; - unsigned int last_motion_score; + unsigned int last_motion_score; EventCloseMode event_close_mode; @@ -342,7 +334,7 @@ public: int p_orientation, unsigned int p_deinterlacing, int p_savejpegs, - int p_videowriter, + VideoWriter p_videowriter, std::string p_encoderparams, bool p_record_audio, const char *p_event_prefix, @@ -378,47 +370,38 @@ public: void AddPrivacyBitmask( Zone *p_zones[] ); bool connect(); - inline int ShmValid() const - { + inline int ShmValid() const { return( shared_data->valid ); } - inline int Id() const - { + inline int Id() const { return( id ); } - inline const char *Name() const - { + inline const char *Name() const { return( name ); } - inline Function GetFunction() const - { + inline Function GetFunction() const { return( function ); } - inline bool Enabled() - { + inline bool Enabled() { if ( function <= MONITOR ) return( false ); return( enabled ); } - inline const char *EventPrefix() const - { + inline const char *EventPrefix() const { return( event_prefix ); } - inline bool Ready() - 
{ + inline bool Ready() { if ( function <= MONITOR ) return( false ); return( image_count > ready_count ); } - inline bool Active() - { + inline bool Active() { if ( function <= MONITOR ) return( false ); return( enabled && shared_data->active ); } - inline bool Exif() - { + inline bool Exif() { return( embed_exif ); } Orientation getOrientation() const; @@ -429,9 +412,10 @@ public: unsigned int SubpixelOrder() const; int GetOptSaveJPEGs() const { return( savejpegspref ); } - int GetOptVideoWriter() const { return( videowriterpref ); } + VideoWriter GetOptVideoWriter() const { return( videowriter ); } const std::vector* GetOptEncoderParams() const { return( &encoderparamsvec ); } + unsigned int GetPreEventCount() const { return pre_event_count; }; State GetState() const; int GetImage( int index=-1, int scale=100 ); struct timeval GetTimestamp( int index=-1 ) const; @@ -504,8 +488,7 @@ public: #define MOD_ADD( var, delta, limit ) (((var)+(limit)+(delta))%(limit)) -class MonitorStream : public StreamBase -{ +class MonitorStream : public StreamBase { protected: typedef struct SwapImage { bool valid; @@ -536,19 +519,15 @@ protected: void processCommand( const CmdMsg *msg ); public: - MonitorStream() : playback_buffer( 0 ), delayed( false ), frame_count( 0 ) - { + MonitorStream() : playback_buffer( 0 ), delayed( false ), frame_count( 0 ) { } - void setStreamBuffer( int p_playback_buffer ) - { + void setStreamBuffer( int p_playback_buffer ) { playback_buffer = p_playback_buffer; } - void setStreamTTL( time_t p_ttl ) - { + void setStreamTTL( time_t p_ttl ) { ttl = p_ttl; } - bool setStreamStart( int monitor_id ) - { + bool setStreamStart( int monitor_id ) { return loadMonitor( monitor_id ); } void runStream(); diff --git a/src/zm_packet.cpp b/src/zm_packet.cpp new file mode 100644 index 000000000..8fbb65cb8 --- /dev/null +++ b/src/zm_packet.cpp @@ -0,0 +1,44 @@ +//ZoneMinder Packet Implementation Class +//Copyright 2017 ZoneMinder LLC +// +//This file is part of 
ZoneMinder. +// +//ZoneMinder is free software: you can redistribute it and/or modify +//it under the terms of the GNU General Public License as published by +//the Free Software Foundation, either version 3 of the License, or +//(at your option) any later version. +// +//ZoneMinder is distributed in the hope that it will be useful, +//but WITHOUT ANY WARRANTY; without even the implied warranty of +//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//GNU General Public License for more details. +// +//You should have received a copy of the GNU General Public License +//along with ZoneMinder. If not, see <http://www.gnu.org/licenses/>. + + +#include "zm_packet.h" +#include "zm_ffmpeg.h" + +using namespace std; + +ZMPacket::ZMPacket( AVPacket *p ) { + av_init_packet( &packet ); + if ( zm_av_packet_ref( &packet, p ) < 0 ) { + Error("error refing packet"); + } + gettimeofday( &timestamp, NULL ); +} + +ZMPacket::ZMPacket( AVPacket *p, struct timeval *t ) { + av_init_packet( &packet ); + if ( zm_av_packet_ref( &packet, p ) < 0 ) { + Error("error refing packet"); + } + timestamp = *t; +} + +ZMPacket::~ZMPacket() { + zm_av_packet_unref( &packet ); +} + diff --git a/src/zm_packet.h b/src/zm_packet.h new file mode 100644 index 000000000..9fd7ed8ee --- /dev/null +++ b/src/zm_packet.h @@ -0,0 +1,39 @@ +//ZoneMinder Packet Wrapper Class +//Copyright 2017 ZoneMinder LLC +// +//This file is part of ZoneMinder. +// +//ZoneMinder is free software: you can redistribute it and/or modify +//it under the terms of the GNU General Public License as published by +//the Free Software Foundation, either version 3 of the License, or +//(at your option) any later version. +// +//ZoneMinder is distributed in the hope that it will be useful, +//but WITHOUT ANY WARRANTY; without even the implied warranty of +//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//GNU General Public License for more details. +// +//You should have received a copy of the GNU General Public License +//along with ZoneMinder. 
If not, see . + + +#ifndef ZM_PACKET_H +#define ZM_PACKET_H + +extern "C" { +#include +} + +class ZMPacket { + public: + + AVPacket packet; + struct timeval timestamp; + public: + AVPacket *av_packet() { return &packet; } + ZMPacket( AVPacket *packet, struct timeval *timestamp ); + ZMPacket( AVPacket *packet ); + ~ZMPacket(); +}; + +#endif /* ZM_PACKET_H */ diff --git a/src/zm_packetqueue.cpp b/src/zm_packetqueue.cpp new file mode 100644 index 000000000..f60b11df5 --- /dev/null +++ b/src/zm_packetqueue.cpp @@ -0,0 +1,152 @@ +//ZoneMinder Packet Queue Implementation Class +//Copyright 2016 Steve Gilvarry +// +//This file is part of ZoneMinder. +// +//ZoneMinder is free software: you can redistribute it and/or modify +//it under the terms of the GNU General Public License as published by +//the Free Software Foundation, either version 3 of the License, or +//(at your option) any later version. +// +//ZoneMinder is distributed in the hope that it will be useful, +//but WITHOUT ANY WARRANTY; without even the implied warranty of +//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//GNU General Public License for more details. +// +//You should have received a copy of the GNU General Public License +//along with ZoneMinder. If not, see . 
+ + +#include "zm_packetqueue.h" +#include "zm_ffmpeg.h" + +#define VIDEO_QUEUESIZE 200 +#define AUDIO_QUEUESIZE 50 + +using namespace std; + +zm_packetqueue::zm_packetqueue(){ + +} + +zm_packetqueue::~zm_packetqueue() { + +} + +bool zm_packetqueue::queuePacket( ZMPacket* zm_packet ) { + pktQueue.push_back( zm_packet ); + + return true; +} +bool zm_packetqueue::queuePacket( AVPacket* av_packet ) { + + ZMPacket *zm_packet = new ZMPacket( av_packet ); + + pktQueue.push_back( zm_packet ); + + return true; +} + +ZMPacket* zm_packetqueue::popPacket( ) { + if ( pktQueue.empty() ) { + return NULL; + } + + ZMPacket *packet = pktQueue.front(); + pktQueue.pop_front(); + + return packet; +} + +unsigned int zm_packetqueue::clearQueue( unsigned int frames_to_keep, int stream_id ) { + + Debug(3, "Clearing all but %d frames", frames_to_keep ); + frames_to_keep += 1; + + if ( pktQueue.empty() ) { + Debug(3, "Queue is empty"); + return 0; + } else { + Debug(3, "Queue has (%d)", pktQueue.size() ); + } + + list::reverse_iterator it; + ZMPacket *packet = NULL; + + for ( it = pktQueue.rbegin(); it != pktQueue.rend() && frames_to_keep; ++it ) { + ZMPacket *zm_packet = *it; + AVPacket *av_packet = &(zm_packet->packet); + + Debug(3, "Looking at packet with stream index (%d) with keyframe (%d), frames_to_keep is (%d)", av_packet->stream_index, ( av_packet->flags & AV_PKT_FLAG_KEY ), frames_to_keep ); + + // Want frames_to_keep video keyframes. 
Otherwise, we may not have enough + if ( ( av_packet->stream_index == stream_id) && ( av_packet->flags & AV_PKT_FLAG_KEY ) ) { + if (!frames_to_keep) + break; + frames_to_keep --; + } + } + unsigned int delete_count = 0; + while ( it != pktQueue.rend() ) { + Debug(3, "Deleting a packet from the front, count is (%d)", delete_count ); + + packet = pktQueue.front(); + pktQueue.pop_front(); + delete packet; + + delete_count += 1; + } + return delete_count; +} // end unsigned int zm_packetqueue::clearQueue( unsigned int frames_to_keep, int stream_id ) + +void zm_packetqueue::clearQueue() { + ZMPacket *packet = NULL; + while(!pktQueue.empty()) { + packet = pktQueue.front(); + pktQueue.pop_front(); + delete packet; + } +} + +unsigned int zm_packetqueue::size() { + return pktQueue.size(); +} + + +void zm_packetqueue::clear_unwanted_packets( timeval *recording_started, int mVideoStreamId ) { + // Need to find the keyframe <= recording_started. Can get rid of audio packets. + if ( pktQueue.empty() ) { + return; + } + + // Step 1 - find keyframe < recording_started. 
+ // Step 2 - pop packets until we get to the packet in step 2 + list::reverse_iterator it; + + for ( it = pktQueue.rbegin(); it != pktQueue.rend(); ++ it ) { + ZMPacket *zm_packet = *it; + AVPacket *av_packet = &(zm_packet->packet); +Debug(1, "Looking for keyframe after start" ); + if ( + ( av_packet->flags & AV_PKT_FLAG_KEY ) + && + ( av_packet->stream_index == mVideoStreamId ) + && + timercmp( &(zm_packet->timestamp), recording_started, < ) + ) { +Debug(1, "Found keyframe before start" ); + break; + } + } + if ( it == pktQueue.rend() ) { + Debug(1, "Didn't find a keyframe packet keeping all" ); + return; + } + + ZMPacket *packet = NULL; + while ( pktQueue.rend() != it ) { + packet = pktQueue.front(); + pktQueue.pop_front(); + delete packet; + } +} diff --git a/src/zm_packetqueue.h b/src/zm_packetqueue.h new file mode 100644 index 000000000..39160ddfd --- /dev/null +++ b/src/zm_packetqueue.h @@ -0,0 +1,52 @@ +//ZoneMinder Packet Queue Interface Class +//Copyright 2016 Steve Gilvarry +// +//This file is part of ZoneMinder. +// +//ZoneMinder is free software: you can redistribute it and/or modify +//it under the terms of the GNU General Public License as published by +//the Free Software Foundation, either version 3 of the License, or +//(at your option) any later version. +// +//ZoneMinder is distributed in the hope that it will be useful, +//but WITHOUT ANY WARRANTY; without even the implied warranty of +//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +//GNU General Public License for more details. +// +//You should have received a copy of the GNU General Public License +//along with ZoneMinder. If not, see . 
+ + +#ifndef ZM_PACKETQUEUE_H +#define ZM_PACKETQUEUE_H + +//#include +//#include +//#include +#include +#include "zm_packet.h" + +extern "C" { +#include +} + +class zm_packetqueue { +public: + zm_packetqueue(); + virtual ~zm_packetqueue(); + bool queuePacket( AVPacket* packet, struct timeval *timestamp ); + bool queuePacket( ZMPacket* packet ); + bool queuePacket( AVPacket* packet ); + ZMPacket * popPacket( ); + bool popVideoPacket(ZMPacket* packet); + bool popAudioPacket(ZMPacket* packet); + unsigned int clearQueue( unsigned int video_frames_to_keep, int stream_id ); + void clearQueue( ); + unsigned int size(); + void clear_unwanted_packets( timeval *recording, int mVideoStreamId ); +private: + std::list pktQueue; + +}; + +#endif /* ZM_PACKETQUEUE_H */ diff --git a/src/zm_remote_camera.h b/src/zm_remote_camera.h index ea510b42f..b081baeb0 100644 --- a/src/zm_remote_camera.h +++ b/src/zm_remote_camera.h @@ -89,7 +89,7 @@ public: virtual int PreCapture() = 0; virtual int Capture( Image &image ) = 0; virtual int PostCapture() = 0; - virtual int CaptureAndRecord( Image &image, bool recording, char* event_directory )=0; + virtual int CaptureAndRecord( Image &image, timeval recording, char* event_directory )=0; }; #endif // ZM_REMOTE_CAMERA_H diff --git a/src/zm_remote_camera_http.cpp b/src/zm_remote_camera_http.cpp index d9a0c0b05..f80c3cdd6 100644 --- a/src/zm_remote_camera_http.cpp +++ b/src/zm_remote_camera_http.cpp @@ -189,7 +189,7 @@ int RemoteCameraHttp::SendRequest() * > 0 is the # of bytes read. 
*/ -int RemoteCameraHttp::ReadData( Buffer &buffer, int bytes_expected ) +int RemoteCameraHttp::ReadData( Buffer &buffer, unsigned int bytes_expected ) { fd_set rfds; FD_ZERO(&rfds); diff --git a/src/zm_remote_camera_http.h b/src/zm_remote_camera_http.h index e03aead78..395ae3975 100644 --- a/src/zm_remote_camera_http.h +++ b/src/zm_remote_camera_http.h @@ -53,12 +53,12 @@ public: int Connect(); int Disconnect(); int SendRequest(); - int ReadData( Buffer &buffer, int bytes_expected=0 ); + int ReadData( Buffer &buffer, unsigned int bytes_expected=0 ); int GetResponse(); int PreCapture(); int Capture( Image &image ); int PostCapture(); - int CaptureAndRecord( Image &image, bool recording, char* event_directory ) {return(0);}; + int CaptureAndRecord( Image &image, timeval recording, char* event_directory ) {return(0);}; }; #endif // ZM_REMOTE_CAMERA_HTTP_H diff --git a/src/zm_remote_camera_rtsp.cpp b/src/zm_remote_camera_rtsp.cpp index d2196756f..5a9028aac 100644 --- a/src/zm_remote_camera_rtsp.cpp +++ b/src/zm_remote_camera_rtsp.cpp @@ -58,10 +58,9 @@ RemoteCameraRtsp::RemoteCameraRtsp( unsigned int p_monitor_id, const std::string mRawFrame = NULL; mFrame = NULL; frameCount = 0; - wasRecording = false; startTime=0; -#if HAVE_LIBSWSCALE +#if HAVE_LIBSWSCALE mConvertContext = NULL; #endif /* Has to be located inside the constructor so other components such as zma will receive correct colours and subpixel order */ @@ -82,13 +81,8 @@ RemoteCameraRtsp::RemoteCameraRtsp( unsigned int p_monitor_id, const std::string RemoteCameraRtsp::~RemoteCameraRtsp() { -#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101) av_frame_free( &mFrame ); av_frame_free( &mRawFrame ); -#else - av_freep( &mFrame ); - av_freep( &mRawFrame ); -#endif #if HAVE_LIBSWSCALE if ( mConvertContext ) @@ -117,7 +111,7 @@ void RemoteCameraRtsp::Initialise() int max_size = width*height*colours; // This allocates a buffer able to hold a raw fframe, which is a little artbitrary. 
Might be nice to get some - // decent data on how large a buffer is really needed. + // decent data on how large a buffer is really needed. I think in ffmpeg there are now some functions to do that. buffer.size( max_size ); if ( logDebugging() ) @@ -172,6 +166,7 @@ int RemoteCameraRtsp::PrimeCapture() // Find first video stream present mVideoStreamId = -1; + mAudioStreamId = -1; // Find the first video stream. for ( unsigned int i = 0; i < mFormatContext->nb_streams; i++ ) { @@ -181,12 +176,31 @@ int RemoteCameraRtsp::PrimeCapture() if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO ) #endif { - mVideoStreamId = i; - break; + if ( mVideoStreamId == -1 ) { + mVideoStreamId = i; + continue; + } else { + Debug(2, "Have another video stream." ); + } } - } +#if (LIBAVCODEC_VERSION_CHECK(52, 64, 0, 64, 0) || LIBAVUTIL_VERSION_CHECK(50, 14, 0, 14, 0)) + if ( mFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO ) +#else + if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO ) +#endif + { + if ( mAudioStreamId == -1 ) { + mAudioStreamId = i; + } else { + Debug(2, "Have another audio stream." 
); + } + } + } // end foreach stream + if ( mVideoStreamId == -1 ) Fatal( "Unable to locate video stream" ); + if ( mAudioStreamId == -1 ) + Debug( 3, "Unable to locate audio stream" ); // Get a pointer to the codec context for the video stream mCodecContext = mFormatContext->streams[mVideoStreamId]->codec; @@ -248,8 +262,7 @@ int RemoteCameraRtsp::PrimeCapture() return( 0 ); } -int RemoteCameraRtsp::PreCapture() -{ +int RemoteCameraRtsp::PreCapture() { if ( !rtspThread->isRunning() ) return( -1 ); if ( !rtspThread->hasSources() ) @@ -260,8 +273,7 @@ int RemoteCameraRtsp::PreCapture() return( 0 ); } -int RemoteCameraRtsp::Capture( Image &image ) -{ +int RemoteCameraRtsp::Capture( Image &image ) { AVPacket packet; uint8_t* directbuffer; int frameComplete = false; @@ -272,15 +284,13 @@ int RemoteCameraRtsp::Capture( Image &image ) Error("Failed requesting writeable buffer for the captured image."); return (-1); } - - while ( true ) - { + + while ( true ) { buffer.clear(); if ( !rtspThread->isRunning() ) return (-1); - if ( rtspThread->getFrame( buffer ) ) - { + if ( rtspThread->getFrame( buffer ) ) { Debug( 3, "Read frame %d bytes", buffer.size() ); Debug( 4, "Address %p", buffer.head() ); Hexdump( 4, buffer.head(), 16 ); @@ -288,18 +298,17 @@ int RemoteCameraRtsp::Capture( Image &image ) if ( !buffer.size() ) return( -1 ); - if(mCodecContext->codec_id == AV_CODEC_ID_H264) - { + if(mCodecContext->codec_id == AV_CODEC_ID_H264) { // SPS and PPS frames should be saved and appended to IDR frames int nalType = (buffer.head()[3] & 0x1f); - // SPS + // SPS The SPS NAL unit contains parameters that apply to a series of consecutive coded video pictures if(nalType == 7) { lastSps = buffer; continue; } - // PPS + // PPS The PPS NAL unit contains parameters that apply to the decoding of one or more individual pictures inside a coded video sequence else if(nalType == 8) { lastPps = buffer; @@ -311,6 +320,8 @@ int RemoteCameraRtsp::Capture( Image &image ) buffer += lastSps; buffer 
+= lastPps; } + } else { + Debug(3, "Not an h264 packet"); } av_init_packet( &packet ); @@ -339,37 +350,35 @@ int RemoteCameraRtsp::Capture( Image &image ) } // At this point, we either have a frame or ran out of buffer. What happens if we run out of buffer? if ( frameComplete ) { - + Debug( 3, "Got frame %d", frameCount ); + avpicture_fill( (AVPicture *)mFrame, directbuffer, imagePixFormat, width, height ); - -#if HAVE_LIBSWSCALE + + #if HAVE_LIBSWSCALE if(mConvertContext == NULL) { mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL ); if(mConvertContext == NULL) Fatal( "Unable to create conversion context"); } - + if ( sws_scale( mConvertContext, mRawFrame->data, mRawFrame->linesize, 0, mCodecContext->height, mFrame->data, mFrame->linesize ) < 0 ) Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, frameCount ); -#else // HAVE_LIBSWSCALE - Fatal( "You must compile ffmpeg with the --enable-swscale option to use RTSP cameras" ); -#endif // HAVE_LIBSWSCALE - frameCount++; + #else // HAVE_LIBSWSCALE + Fatal( "You must compile ffmpeg with the --enable-swscale option to use RTSP cameras" ); + #endif // HAVE_LIBSWSCALE + + frameCount++; } /* frame complete */ - -#if LIBAVCODEC_VERSION_CHECK(57, 8, 0, 12, 100) - av_packet_unref( &packet ); -#else - av_free_packet( &packet ); -#endif + + zm_av_packet_unref( &packet ); } /* getFrame() */ - + if(frameComplete) return (0); - + } // end while true // can never get here. 
@@ -377,32 +386,63 @@ int RemoteCameraRtsp::Capture( Image &image ) } //Function to handle capture and store -int RemoteCameraRtsp::CaptureAndRecord( Image &image, bool recording, char* event_file ) { + +int RemoteCameraRtsp::CaptureAndRecord(Image &image, timeval recording, char* event_file ) { AVPacket packet; uint8_t* directbuffer; int frameComplete = false; - /* Request a writeable buffer of the target image */ - directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); - if(directbuffer == NULL) { - Error("Failed requesting writeable buffer for the captured image."); - return (-1); - } while ( true ) { + +// WHY Are we clearing it? Might be something good in it. buffer.clear(); + if ( !rtspThread->isRunning() ) return (-1); + //Video recording + if ( recording.tv_sec ) { + // The directory we are recording to is no longer tied to the current event. + // Need to re-init the videostore with the correct directory and start recording again + // Not sure why we are only doing this on keyframe, al + if ( videoStore && (strcmp(oldDirectory, event_file)!=0) ) { + //don't open new videostore until we're on a key frame..would this require an offset adjustment for the event as a result?...if we store our key frame location with the event will that be enough? + Info("Re-starting video storage module"); + if ( videoStore ) { + delete videoStore; + videoStore = NULL; + } + } // end if changed to new event + + if ( ! videoStore ) { + //Instantiate the video storage module + + videoStore = new VideoStore((const char *)event_file, "mp4", + mFormatContext->streams[mVideoStreamId], + mAudioStreamId==-1?NULL:mFormatContext->streams[mAudioStreamId], + startTime, + this->getMonitor() ); + strcpy(oldDirectory, event_file); + } // end if ! 
videoStore + + } else { + if ( videoStore ) { + Info("Deleting videoStore instance"); + delete videoStore; + videoStore = NULL; + } + } // end if recording or not + if ( rtspThread->getFrame( buffer ) ) { Debug( 3, "Read frame %d bytes", buffer.size() ); Debug( 4, "Address %p", buffer.head() ); - Hexdump( 4, buffer.head(), 16 ); + Hexdump( 4, buffer.head(), 16 ); if ( !buffer.size() ) return( -1 ); - if(mCodecContext->codec_id == AV_CODEC_ID_H264) { + if ( mCodecContext->codec_id == AV_CODEC_ID_H264 ) { // SPS and PPS frames should be saved and appended to IDR frames int nalType = (buffer.head()[3] & 0x1f); @@ -425,13 +465,14 @@ int RemoteCameraRtsp::CaptureAndRecord( Image &image, bool recording, char* even av_init_packet( &packet ); - // Why are we checking for it being the video stream - if ( packet.stream_index == mVideoStreamId ) { - - while ( !frameComplete && buffer.size() > 0 ) { - packet.data = buffer.head(); - packet.size = buffer.size(); + // Keep decoding until a complete frame is had. + while ( !frameComplete && buffer.size() > 0 ) { + packet.data = buffer.head(); + packet.size = buffer.size(); + // Why are we checking for it being the video stream? Because it might be audio or something else. + // Um... we just initialized packet... we can't be testing for what it is yet.... 
+ if ( packet.stream_index == mVideoStreamId ) { // So this does the decode #if LIBAVCODEC_VERSION_CHECK(52, 23, 0, 23, 0) int len = avcodec_decode_video2( mCodecContext, mRawFrame, &frameComplete, &packet ); @@ -449,117 +490,81 @@ int RemoteCameraRtsp::CaptureAndRecord( Image &image, bool recording, char* even //Hexdump( 0, buffer.head(), buffer.size() ); buffer -= len; - } // end while get & decode a frame - if ( frameComplete ) { + if ( frameComplete ) { - Debug( 3, "Got frame %d", frameCount ); + Debug( 3, "Got frame %d", frameCount ); + + /* Request a writeable buffer of the target image */ + directbuffer = image.WriteBuffer(width, height, colours, subpixelorder); + if(directbuffer == NULL) { + Error("Failed requesting writeable buffer for the captured image."); + return (-1); + } #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0) - av_image_fill_arrays(mFrame->data, mFrame->linesize, - directbuffer, imagePixFormat, width, height, 1); + av_image_fill_arrays(mFrame->data, mFrame->linesize, + directbuffer, imagePixFormat, width, height, 1); #else - avpicture_fill( (AVPicture *)mFrame, directbuffer, - imagePixFormat, width, height); + avpicture_fill( (AVPicture *)mFrame, directbuffer, + imagePixFormat, width, height); #endif - //Video recording - if ( recording && !wasRecording ) { - //Instantiate the video storage module + } // endif frameComplete - videoStore = new VideoStore((const char *)event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - mAudioStreamId==-1?NULL:mFormatContext->streams[mAudioStreamId], - startTime, - this->getMonitor()->getOrientation() ); - wasRecording = true; - strcpy(oldDirectory, event_file); - - } else if ( !recording && wasRecording && videoStore ) { - // Why are we deleting the videostore? Becase for soem reason we are no longer recording? How does that happen? - Info("Deleting videoStore instance"); - delete videoStore; - videoStore = NULL; - } - - //The directory we are recording to is no longer tied to the current event. 
Need to re-init the videostore with the correct directory and start recording again - if ( recording && wasRecording && (strcmp(oldDirectory, event_file)!=0) && (packet.flags & AV_PKT_FLAG_KEY) ) { - //don't open new videostore until we're on a key frame..would this require an offset adjustment for the event as a result?...if we store our key frame location with the event will that be enough? - Info("Re-starting video storage module"); - if ( videoStore ) { - delete videoStore; - videoStore = NULL; - } - - videoStore = new VideoStore((const char *)event_file, "mp4", - mFormatContext->streams[mVideoStreamId], - mAudioStreamId==-1?NULL:mFormatContext->streams[mAudioStreamId], - startTime, - this->getMonitor()->getOrientation() ); - strcpy( oldDirectory, event_file ); - } - - if ( videoStore && recording ) { + if ( videoStore ) { //Write the packet to our video store - int ret = videoStore->writeVideoFramePacket(&packet, mFormatContext->streams[mVideoStreamId]);//, &lastKeyframePkt); + int ret = videoStore->writeVideoFramePacket(&packet);//, &lastKeyframePkt); if ( ret < 0 ) {//Less than zero and we skipped a frame - av_free_packet( &packet ); +// Should not + zm_av_packet_unref( &packet ); return 0; } - } + } // end if videoStore, so we are recording #if HAVE_LIBSWSCALE - if(mConvertContext == NULL) { - mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL ); + // Why are we re-scaling after writing out the packet? 
+ if ( mConvertContext == NULL ) { + mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL ); - if(mConvertContext == NULL) - Fatal( "Unable to create conversion context"); + if ( mConvertContext == NULL ) + Fatal( "Unable to create conversion context"); } if ( sws_scale( mConvertContext, mRawFrame->data, mRawFrame->linesize, 0, mCodecContext->height, mFrame->data, mFrame->linesize ) < 0 ) - Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, frameCount ); + Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, frameCount ); #else // HAVE_LIBSWSCALE Fatal( "You must compile ffmpeg with the --enable-swscale option to use RTSP cameras" ); #endif // HAVE_LIBSWSCALE frameCount++; - } /* frame complete */ } else if ( packet.stream_index == mAudioStreamId ) { Debug( 4, "Got audio packet" ); - if ( videoStore && recording ) { - if ( record_audio ) { - Debug( 4, "Storing Audio packet" ); - //Write the packet to our video store - int ret = videoStore->writeAudioFramePacket(&packet, mFormatContext->streams[packet.stream_index]); //FIXME no relevance of last key frame - if ( ret < 0 ) { //Less than zero and we skipped a frame -#if LIBAVCODEC_VERSION_CHECK(57, 8, 0, 12, 100) - av_packet_unref( &packet ); -#else - av_free_packet( &packet ); -#endif - return 0; - } + if ( videoStore && record_audio ) { + Debug( 4, "Storing Audio packet" ); + //Write the packet to our video store + int ret = videoStore->writeAudioFramePacket( &packet ); //FIXME no relevance of last key frame + if ( ret < 0 ) { //Less than zero and we skipped a frame + zm_av_packet_unref( &packet ); + return 0; } } } // end if video or audio packet -#if LIBAVCODEC_VERSION_CHECK(57, 8, 0, 12, 100) - av_packet_unref( &packet ); -#else - av_free_packet( &packet ); -#endif - } /* getFrame() 
*/ - + zm_av_packet_unref( &packet ); + } // end while ! framecomplete and buffer.size() if(frameComplete) return (0); - - } // end while true + } /* getFrame() */ + +} // end while true + +// can never get here. return (0) ; } // int RemoteCameraRtsp::CaptureAndRecord( Image &image, bool recording, char* event_file ) -int RemoteCameraRtsp::PostCapture() -{ +int RemoteCameraRtsp::PostCapture() { return( 0 ); } #endif // HAVE_LIBAVFORMAT diff --git a/src/zm_remote_camera_rtsp.h b/src/zm_remote_camera_rtsp.h index 11f2788e4..6080902b6 100644 --- a/src/zm_remote_camera_rtsp.h +++ b/src/zm_remote_camera_rtsp.h @@ -85,7 +85,7 @@ public: int PreCapture(); int Capture( Image &image ); int PostCapture(); - int CaptureAndRecord( Image &image, bool recording, char* event_directory ); + int CaptureAndRecord( Image &image, timeval recording, char* event_directory ); }; #endif // ZM_REMOTE_CAMERA_RTSP_H diff --git a/src/zm_utils.cpp b/src/zm_utils.cpp index 7e485ac19..f6ce68089 100644 --- a/src/zm_utils.cpp +++ b/src/zm_utils.cpp @@ -395,6 +395,18 @@ void timespec_diff(struct timespec *start, struct timespec *end, struct timespec } } +char *timeval_to_string( struct timeval tv ) { + time_t nowtime; + struct tm *nowtm; + static char tmbuf[64], buf[64]; + + nowtime = tv.tv_sec; + nowtm = localtime(&nowtime); + strftime(tmbuf, sizeof tmbuf, "%Y-%m-%d %H:%M:%S", nowtm); + snprintf(buf, sizeof buf, "%s.%06ld", tmbuf, tv.tv_usec); + return buf; +} + std::string UriDecode( const std::string &encoded ) { #ifdef HAVE_LIBCURL CURL *curl = curl_easy_init(); diff --git a/src/zm_utils.h b/src/zm_utils.h index 7235bb15f..961389611 100644 --- a/src/zm_utils.h +++ b/src/zm_utils.h @@ -61,6 +61,7 @@ void hwcaps_detect(); extern unsigned int sseversion; extern unsigned int neonversion; +char *timeval_to_string( struct timeval tv ); std::string UriDecode( const std::string &encoded ); #endif // ZM_UTILS_H diff --git a/src/zm_videostore.cpp b/src/zm_videostore.cpp index 422853c0f..99b521c3c 100644 
--- a/src/zm_videostore.cpp +++ b/src/zm_videostore.cpp @@ -1,4 +1,3 @@ -// // ZoneMinder Video Storage Implementation // Written by Chris Wiggins // http://chriswiggins.co.nz @@ -29,18 +28,19 @@ #include "zm_videostore.h" extern "C"{ -#include "libavutil/time.h" + #include "libavutil/time.h" } VideoStore::VideoStore(const char *filename_in, const char *format_in, - AVStream *input_st, - AVStream *inpaud_st, + AVStream *p_video_input_stream, + AVStream *p_audio_input_stream, int64_t nStartTime, - Monitor::Orientation orientation + Monitor * monitor ) { + video_input_stream = p_video_input_stream; + audio_input_stream = p_audio_input_stream; - AVDictionary *pmetadata = NULL; - int dsr; + video_input_context = video_input_stream->codec; //store inputs in variables local to class filename = filename_in; @@ -49,11 +49,10 @@ VideoStore::VideoStore(const char *filename_in, const char *format_in, keyframeMessage = false; keyframeSkipNumber = 0; - Info("Opening video storage stream %s format: %d\n", filename, format); + Info("Opening video storage stream %s format: %s\n", filename, format); - //Init everything we need - int ret; - av_register_all(); + //Init everything we need, shouldn't have to do this, ffmpeg_camera or something else will call it. 
+ //av_register_all(); ret = avformat_alloc_output_context2(&oc, NULL, NULL, filename); if ( ret < 0 ) { @@ -62,6 +61,8 @@ VideoStore::VideoStore(const char *filename_in, const char *format_in, filename, av_make_error_string(ret).c_str() ); + } else { + Debug(2, "Success alocateing output context"); } //Couldn't deduce format from filename, trying from format name @@ -72,90 +73,148 @@ VideoStore::VideoStore(const char *filename_in, const char *format_in, " could not be assigned based on filename or format %s", filename, format); } + } else { + Debug(2, "Success alocateing output context"); } - dsr = av_dict_set(&pmetadata, "title", "Zoneminder Security Recording", 0); + AVDictionary *pmetadata = NULL; + int dsr = av_dict_set(&pmetadata, "title", "Zoneminder Security Recording", 0); if (dsr < 0) Warning("%s:%d: title set failed", __FILE__, __LINE__ ); oc->metadata = pmetadata; - fmt = oc->oformat; + output_format = oc->oformat; - video_st = avformat_new_stream(oc, (AVCodec *)input_st->codec->codec); - if (!video_st) { + video_output_stream = avformat_new_stream(oc, (AVCodec*)video_input_context->codec); + if (!video_output_stream) { Fatal("Unable to create video out stream\n"); + } else { + Debug(2, "Success creating video out stream" ); } - ret = avcodec_copy_context(video_st->codec, input_st->codec); + video_output_context = video_output_stream->codec; + +#if LIBAVCODEC_VERSION_CHECK(58, 0, 0, 0, 0) + Debug(2, "setting parameters"); + ret = avcodec_parameters_to_context( video_output_context, video_input_stream->codecpar ); + if ( ret < 0 ) { + Error( "Could not initialize stream parameteres"); + return; + } else { + Debug(2, "Success getting parameters"); + } +#else + ret = avcodec_copy_context(video_output_context, video_input_context ); if (ret < 0) { - Fatal("Unable to copy input video context to output video context " - "%s\n", av_make_error_string(ret).c_str()); + Fatal("Unable to copy input video context to output video context %s\n", + 
av_make_error_string(ret).c_str()); + } else { + Debug(3, "Success copying context" ); + } +#endif + + // Just copy them from the input, no reason to choose different + video_output_context->time_base = video_input_context->time_base; + video_output_stream->time_base = video_input_stream->time_base; + + Debug(3, "Time bases: VIDEO input stream (%d/%d) input codec: (%d/%d) output stream: (%d/%d) output codec (%d/%d)", + video_input_stream->time_base.num, + video_input_stream->time_base.den, + video_input_context->time_base.num, + video_input_context->time_base.den, + video_output_stream->time_base.num, + video_output_stream->time_base.den, + video_output_context->time_base.num, + video_output_context->time_base.den + ); + + // WHY? + //video_output_context->codec_tag = 0; + if (!video_output_context->codec_tag) { + Debug(2, "No codec_tag"); + if (! oc->oformat->codec_tag + || av_codec_get_id (oc->oformat->codec_tag, video_input_context->codec_tag) == video_output_context->codec_id + || av_codec_get_tag(oc->oformat->codec_tag, video_input_context->codec_id) <= 0) { + Warning("Setting codec tag"); + video_output_context->codec_tag = video_input_context->codec_tag; + } } - if ( video_st->sample_aspect_ratio.den != video_st->codec->sample_aspect_ratio.den ) { - Warning("Fixingample_aspect_ratio.den"); - video_st->sample_aspect_ratio.den = video_st->codec->sample_aspect_ratio.den; - } - if ( video_st->sample_aspect_ratio.num != input_st->codec->sample_aspect_ratio.num ) { - Warning("Fixingample_aspect_ratio.num"); - video_st->sample_aspect_ratio.num = input_st->codec->sample_aspect_ratio.num; - } - if ( video_st->codec->codec_id != input_st->codec->codec_id ) { - Warning("Fixing video_st->codec->codec_id"); - video_st->codec->codec_id = input_st->codec->codec_id; - } - if ( ! video_st->codec->time_base.num ) { - Warning("video_st->codec->time_base.num is not set%d/%d. 
Fixing by setting it to 1", video_st->codec->time_base.num, video_st->codec->time_base.den); - Warning("video_st->codec->time_base.num is not set%d/%d. Fixing by setting it to 1", video_st->time_base.num, video_st->time_base.den); - video_st->codec->time_base.num = video_st->time_base.num; - video_st->codec->time_base.den = video_st->time_base.den; - } - - video_st->codec->codec_tag = 0; if (oc->oformat->flags & AVFMT_GLOBALHEADER) { - video_st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER; + video_output_context->flags |= CODEC_FLAG_GLOBAL_HEADER; } + Monitor::Orientation orientation = monitor->getOrientation(); if ( orientation ) { if ( orientation == Monitor::ROTATE_0 ) { } else if ( orientation == Monitor::ROTATE_90 ) { - dsr = av_dict_set( &video_st->metadata, "rotate", "90", 0); + dsr = av_dict_set( &video_output_stream->metadata, "rotate", "90", 0); if (dsr < 0) Warning("%s:%d: title set failed", __FILE__, __LINE__ ); } else if ( orientation == Monitor::ROTATE_180 ) { - dsr = av_dict_set( &video_st->metadata, "rotate", "180", 0); + dsr = av_dict_set( &video_output_stream->metadata, "rotate", "180", 0); if (dsr < 0) Warning("%s:%d: title set failed", __FILE__, __LINE__ ); } else if ( orientation == Monitor::ROTATE_270 ) { - dsr = av_dict_set( &video_st->metadata, "rotate", "270", 0); + dsr = av_dict_set( &video_output_stream->metadata, "rotate", "270", 0); if (dsr < 0) Warning("%s:%d: title set failed", __FILE__, __LINE__ ); } else { Warning( "Unsupported Orientation(%d)", orientation ); } } + audio_output_codec = NULL; + audio_input_context = NULL; + audio_output_stream = NULL; +#ifdef HAVE_LIBAVRESAMPLE + resample_context = NULL; +#endif - if (inpaud_st) { - audio_st = avformat_new_stream(oc, inpaud_st->codec->codec); - if (!audio_st) { - Error("Unable to create audio out stream\n"); - audio_st = NULL; + if (audio_input_stream) { + audio_input_context = audio_input_stream->codec; + + if ( audio_input_context->codec_id != AV_CODEC_ID_AAC ) { + static char 
error_buffer[256]; + avcodec_string(error_buffer, sizeof(error_buffer), audio_input_context, 0 ); + Debug(3, "Got something other than AAC (%s)", error_buffer ); + if ( ! setup_resampler() ) { + return; + } } else { - ret = avcodec_copy_context(audio_st->codec, inpaud_st->codec); - if (ret < 0) { - Fatal("Unable to copy audio context %s\n", av_make_error_string(ret).c_str()); - } - audio_st->codec->codec_tag = 0; + Debug(3, "Got AAC" ); + + audio_output_stream = avformat_new_stream(oc, (AVCodec*)audio_input_context->codec); + if ( ! audio_output_stream ) { + Error("Unable to create audio out stream\n"); + audio_output_stream = NULL; + } else { + audio_output_context = audio_output_stream->codec; + + ret = avcodec_copy_context(audio_output_context, audio_input_context); + if (ret < 0) { + Error("Unable to copy audio context %s\n", av_make_error_string(ret).c_str()); + audio_output_stream = NULL; + } else { + audio_output_context->codec_tag = 0; + if ( audio_output_context->channels > 1 ) { + Warning("Audio isn't mono, changing it."); + audio_output_context->channels = 1; + } else { + Debug(3, "Audio is mono"); + } + } + } // end if audio_output_stream + } // end if is AAC + + if ( audio_output_stream ) { if (oc->oformat->flags & AVFMT_GLOBALHEADER) { - audio_st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER; + audio_output_context->flags |= CODEC_FLAG_GLOBAL_HEADER; } } - } else { - Debug(3, "No Audio output stream"); - audio_st = NULL; - } + + } // end if audio_input_stream /* open the output file, if needed */ - if (!(fmt->flags & AVFMT_NOFILE)) { + if (!(output_format->flags & AVFMT_NOFILE)) { ret = avio_open2(&oc->pb, filename, AVIO_FLAG_WRITE,NULL,NULL); if (ret < 0) { Fatal("Could not open output file '%s': %s\n", filename, @@ -163,33 +222,84 @@ VideoStore::VideoStore(const char *filename_in, const char *format_in, } } - //av_dict_set(&opts, "movflags", "frag_custom+dash+delay_moov", 0); - //if ((ret = avformat_write_header(ctx, &opts)) < 0) { - //} 
//os->ctx_inited = 1; //avio_flush(ctx->pb); //av_dict_free(&opts); + zm_dump_stream_format( oc, 0, 0, 1 ); + if ( audio_output_stream ) + zm_dump_stream_format( oc, 1, 0, 1 ); - /* Write the stream header, if any. */ - ret = avformat_write_header(oc, NULL); + AVDictionary * opts = NULL; + //av_dict_set(&opts, "movflags", "frag_custom+dash+delay_moov", 0); + //av_dict_set(&opts, "movflags", "frag_custom+dash+delay_moov", 0); + //av_dict_set(&opts, "movflags", "frag_keyframe+empty_moov+default_base_moof", 0); + if ((ret = avformat_write_header(oc, &opts)) < 0) { + Warning("Unable to set movflags to frag_custom+dash+delay_moov"); + /* Write the stream header, if any. */ + ret = avformat_write_header(oc, NULL); + } else if (av_dict_count(opts) != 0) { + Warning("some options not set\n"); + } if (ret < 0) { - zm_dump_stream_format( oc, 0, 0, 1 ); - Fatal("Error occurred when writing output file header to %s: %s\n", + Error("Error occurred when writing output file header to %s: %s\n", filename, av_make_error_string(ret).c_str()); } + if ( opts ) + av_dict_free(&opts); - prevDts = 0; - startPts = 0; - startDts = 0; - filter_in_rescale_delta_last = AV_NOPTS_VALUE; + video_last_pts = 0; + video_last_dts = 0; + audio_last_pts = 0; + audio_last_dts = 0; + previous_pts = 0; + previous_dts = 0; - startTime=av_gettime()-nStartTime;//oc->start_time; - Info("VideoStore startTime=%d\n",startTime); } // VideoStore::VideoStore VideoStore::~VideoStore(){ + if ( audio_output_codec ) { + // Do we need to flush the outputs? I have no idea. 
+ AVPacket pkt; + int got_packet; + av_init_packet(&pkt); + pkt.data = NULL; + pkt.size = 0; + int64_t size; + + while(1) { +#if LIBAVCODEC_VERSION_CHECK(58, 0, 0, 0, 0) + ret = avcodec_receive_packet( audio_output_context, &pkt ); +#else + ret = avcodec_encode_audio2( audio_output_context, &pkt, NULL, &got_packet ); +#endif + if (ret < 0) { + Error("ERror encoding audio while flushing"); + break; + } +Debug(1, "Have audio encoder, need to flush it's output" ); + size += pkt.size; + if (!got_packet) { + break; + } +Debug(2, "writing flushed packet pts(%d) dts(%d) duration(%d)", pkt.pts, pkt.dts, pkt.duration ); + if (pkt.pts != AV_NOPTS_VALUE) + pkt.pts = av_rescale_q(pkt.pts, audio_output_context->time_base, audio_output_stream->time_base); + if (pkt.dts != AV_NOPTS_VALUE) + pkt.dts = av_rescale_q(pkt.dts, audio_output_context->time_base, audio_output_stream->time_base); + if (pkt.duration > 0) + pkt.duration = av_rescale_q(pkt.duration, audio_output_context->time_base, audio_output_stream->time_base); +Debug(2, "writing flushed packet pts(%d) dts(%d) duration(%d)", pkt.pts, pkt.dts, pkt.duration ); + pkt.stream_index = audio_output_stream->index; + av_interleaved_write_frame( oc, &pkt ); + zm_av_packet_unref( &pkt ); + } // while 1 + } + + // Flush Queues + av_interleaved_write_frame( oc, NULL ); + /* Write the trailer before close */ if ( int rc = av_write_trailer(oc) ) { Error("Error writing trailer %s", av_err2str( rc ) ); @@ -200,15 +310,21 @@ VideoStore::~VideoStore(){ // I wonder if we should be closing the file first. // I also wonder if we really need to be doing all the context allocation/de-allocation constantly, or whether we can just re-use it. Just do a file open/close/writeheader/etc. // What if we were only doing audio recording? 
- if ( video_st ) { - avcodec_close(video_st->codec); + if ( video_output_stream ) { + avcodec_close(video_output_context); } - if (audio_st) { - avcodec_close(audio_st->codec); + if (audio_output_stream) { + avcodec_close(audio_output_context); +#ifdef HAVE_LIBAVRESAMPLE + if ( resample_context ) { + avresample_close( resample_context ); + avresample_free( &resample_context ); + } +#endif } // WHen will be not using a file ? - if (!(fmt->flags & AVFMT_NOFILE)) { + if (!(output_format->flags & AVFMT_NOFILE)) { /* Close the output file. */ if ( int rc = avio_close(oc->pb) ) { Error("Error closing avio %s", av_err2str( rc ) ); @@ -221,6 +337,198 @@ VideoStore::~VideoStore(){ avformat_free_context(oc); } +bool VideoStore::setup_resampler() { +#ifdef HAVE_LIBAVRESAMPLE + static char error_buffer[256]; + + audio_output_codec = avcodec_find_encoder(AV_CODEC_ID_AAC); + if ( ! audio_output_codec ) { + Error("Could not find codec for AAC"); + return false; + } + Debug(2, "Have audio output codec"); + + audio_output_stream = avformat_new_stream( oc, audio_output_codec ); + audio_output_context = audio_output_stream->codec; + + if ( ! 
audio_output_context ) { + Error( "could not allocate codec context for AAC\n"); + audio_output_stream = NULL; + return false; + } + + Debug(2, "Have audio_output_context"); + + AVDictionary *opts = NULL; + av_dict_set(&opts, "strict", "experimental", 0); + + /* put sample parameters */ + audio_output_context->bit_rate = audio_input_context->bit_rate; + audio_output_context->sample_rate = audio_input_context->sample_rate; + audio_output_context->channels = audio_input_context->channels; + audio_output_context->channel_layout = audio_input_context->channel_layout; + audio_output_context->sample_fmt = audio_input_context->sample_fmt; + //audio_output_context->refcounted_frames = 1; + + if (audio_output_codec->supported_samplerates) { + int found = 0; + for ( unsigned int i = 0; audio_output_codec->supported_samplerates[i]; i++) { + if ( audio_output_context->sample_rate == audio_output_codec->supported_samplerates[i] ) { + found = 1; + break; + } + } + if ( found ) { + Debug(3, "Sample rate is good"); + } else { + audio_output_context->sample_rate = audio_output_codec->supported_samplerates[0]; + Debug(1, "Sampel rate is no good, setting to (%d)", audio_output_codec->supported_samplerates[0] ); + } + } + + /* check that the encoder supports s16 pcm input */ + if (!check_sample_fmt( audio_output_codec, audio_output_context->sample_fmt)) { + Debug( 3, "Encoder does not support sample format %s, setting to FLTP", + av_get_sample_fmt_name( audio_output_context->sample_fmt)); + audio_output_context->sample_fmt = AV_SAMPLE_FMT_FLTP; + } + + //audio_output_stream->time_base = audio_input_stream->time_base; + audio_output_context->time_base = (AVRational){ 1, audio_output_context->sample_rate }; + + Debug(3, "Audio Time bases input stream (%d/%d) input codec: (%d/%d) output_stream (%d/%d) output codec (%d/%d)", + audio_input_stream->time_base.num, + audio_input_stream->time_base.den, + audio_input_context->time_base.num, + audio_input_context->time_base.den, + 
audio_output_stream->time_base.num, + audio_output_stream->time_base.den, + audio_output_context->time_base.num, + audio_output_context->time_base.den + ); + + ret = avcodec_open2(audio_output_context, audio_output_codec, &opts ); + av_dict_free(&opts); + if ( ret < 0 ) { + av_strerror(ret, error_buffer, sizeof(error_buffer)); + Fatal( "could not open codec (%d) (%s)\n", ret, error_buffer ); + audio_output_codec = NULL; + audio_output_context = NULL; + audio_output_stream = NULL; + return false; + } + + Debug(1, "Audio output bit_rate (%d) sample_rate(%d) channels(%d) fmt(%d) layout(%d) frame_size(%d)", + audio_output_context->bit_rate, + audio_output_context->sample_rate, + audio_output_context->channels, + audio_output_context->sample_fmt, + audio_output_context->channel_layout, + audio_output_context->frame_size + ); + + output_frame_size = audio_output_context->frame_size; + /** Create a new frame to store the audio samples. */ + if (!(input_frame = zm_av_frame_alloc())) { + Error("Could not allocate input frame"); + return false; + } + + /** Create a new frame to store the audio samples. */ + if (!(output_frame = zm_av_frame_alloc())) { + Error("Could not allocate output frame"); + av_frame_free( &input_frame ); + return false; + } + + // Setup the audio resampler + resample_context = avresample_alloc_context(); + if ( ! resample_context ) { + Error( "Could not allocate resample context\n"); + return false; + } + + // Some formats (i.e. WAV) do not produce the proper channel layout + if ( audio_input_context->channel_layout == 0 ) { + Error( "Bad channel layout. 
Need to set it to mono.\n"); + av_opt_set_int( resample_context, "in_channel_layout", av_get_channel_layout( "mono" ), 0 ); + } else { + av_opt_set_int( resample_context, "in_channel_layout", audio_input_context->channel_layout, 0 ); + } + + av_opt_set_int( resample_context, "in_sample_fmt", audio_input_context->sample_fmt, 0); + av_opt_set_int( resample_context, "in_sample_rate", audio_input_context->sample_rate, 0); + av_opt_set_int( resample_context, "in_channels", audio_input_context->channels,0); + //av_opt_set_int( resample_context, "out_channel_layout", audio_output_context->channel_layout, 0); + av_opt_set_int( resample_context, "out_channel_layout", av_get_channel_layout( "mono" ), 0 ); + av_opt_set_int( resample_context, "out_sample_fmt", audio_output_context->sample_fmt, 0); + av_opt_set_int( resample_context, "out_sample_rate", audio_output_context->sample_rate, 0); + av_opt_set_int( resample_context, "out_channels", audio_output_context->channels, 0); + + ret = avresample_open( resample_context ); + if ( ret < 0 ) { + Error( "Could not open resample context\n"); + return false; + } + +#if 0 + /** + * Allocate as many pointers as there are audio channels. + * Each pointer will later point to the audio samples of the corresponding + * channels (although it may be NULL for interleaved formats). + */ + if (!( converted_input_samples = (uint8_t *)calloc( audio_output_context->channels, sizeof(*converted_input_samples))) ) { + Error( "Could not allocate converted input sample pointers\n"); + return; + } + /** + * Allocate memory for the samples of all channels in one consecutive + * block for convenience. 
+ */ + if ((ret = av_samples_alloc( &converted_input_samples, NULL, + audio_output_context->channels, + audio_output_context->frame_size, + audio_output_context->sample_fmt, 0)) < 0) { + Error( "Could not allocate converted input samples (error '%s')\n", + av_make_error_string(ret).c_str() ); + + av_freep(converted_input_samples); + free(converted_input_samples); + return; + } +#endif + + output_frame->nb_samples = audio_output_context->frame_size; + output_frame->format = audio_output_context->sample_fmt; + output_frame->channel_layout = audio_output_context->channel_layout; + + // The codec gives us the frame size, in samples, we calculate the size of the samples buffer in bytes + unsigned int audioSampleBuffer_size = av_samples_get_buffer_size( NULL, audio_output_context->channels, audio_output_context->frame_size, audio_output_context->sample_fmt, 0 ); + converted_input_samples = (uint8_t*) av_malloc( audioSampleBuffer_size ); + + if ( !converted_input_samples ) { + Error( "Could not allocate converted input sample pointers\n"); + return false; + } + + // Setup the data pointers in the AVFrame + if ( avcodec_fill_audio_frame( + output_frame, + audio_output_context->channels, + audio_output_context->sample_fmt, + (const uint8_t*) converted_input_samples, + audioSampleBuffer_size, 0 ) < 0 ) { + Error( "Could not allocate converted input sample pointers\n"); + return false; + } + + return true; +#else + Error("Not built with libavresample library. 
Cannot do audio conversion to AAC"); + return false; +#endif +} + void VideoStore::dumpPacket( AVPacket *pkt ){ char b[10240]; @@ -233,40 +541,84 @@ void VideoStore::dumpPacket( AVPacket *pkt ){ , pkt->stream_index , pkt->flags , pkt->pos - , pkt->convergence_duration + , pkt->duration ); - Info("%s:%d:DEBUG: %s", __FILE__, __LINE__, b); + Debug(1, "%s:%d:DEBUG: %s", __FILE__, __LINE__, b); } -int VideoStore::writeVideoFramePacket(AVPacket *ipkt, AVStream *input_st){//, AVPacket *lastKeyframePkt){ - - //Debug(3, "before ost_tbcket %d", startTime ); - //zm_dump_stream_format( oc, ipkt->stream_index, 0, 1 ); - //Debug(3, "before ost_tbcket %d", startTime ); - int64_t ost_tb_start_time = av_rescale_q(startTime, AV_TIME_BASE_Q, video_st->time_base); - - AVPacket opkt, safepkt; - AVPicture pict; - +int VideoStore::writeVideoFramePacket( AVPacket *ipkt ) { av_init_packet(&opkt); + int duration; + //Scale the PTS of the outgoing packet to be the correct time base if (ipkt->pts != AV_NOPTS_VALUE) { - opkt.pts = av_rescale_q(ipkt->pts-startPts, input_st->time_base, video_st->time_base) - ost_tb_start_time; + + if ( ! video_last_pts ) { + // This is the first packet. + opkt.pts = 0; + Debug(2, "Starting video video_last_pts will become (%d)", ipkt->pts ); + } else { + if ( ipkt->pts < video_last_pts ) { + Debug(1, "Resetting video_last_pts from (%d) to (%d)", video_last_pts, ipkt->pts ); + // wrap around, need to figure out the distance FIXME having this wrong should cause a jump, but then play ok? 
+ opkt.pts = previous_pts + av_rescale_q( ipkt->pts, video_input_stream->time_base, video_output_stream->time_base); + } else { + opkt.pts = previous_pts + av_rescale_q( ipkt->pts - video_last_pts, video_input_stream->time_base, video_output_stream->time_base); + } + } + Debug(3, "opkt.pts = %d from ipkt->pts(%d) - last_pts(%d)", opkt.pts, ipkt->pts, video_last_pts ); + duration = ipkt->pts - video_last_pts; + video_last_pts = ipkt->pts; } else { + Debug(3, "opkt.pts = undef"); opkt.pts = AV_NOPTS_VALUE; } //Scale the DTS of the outgoing packet to be the correct time base - if(ipkt->dts == AV_NOPTS_VALUE) { - opkt.dts = av_rescale_q(input_st->cur_dts-startDts, AV_TIME_BASE_Q, video_st->time_base); + + // Just because the input stream wraps, doesn't mean the output needs to. Really, if we are limiting ourselves to 10min segments I can't imagine every wrapping in the output. So need to handle input wrap, without causing output wrap. + if ( ! video_last_dts ) { + // This is the first packet. + opkt.dts = 0; + Debug(1, "Starting video video_last_dts will become (%d)", ipkt->dts ); + video_last_dts = ipkt->dts; } else { - opkt.dts = av_rescale_q(ipkt->dts-startDts, input_st->time_base, video_st->time_base); + if ( ipkt->dts == AV_NOPTS_VALUE ) { + // why are we using cur_dts instead of packet.dts? 
I think cur_dts is in AV_TIME_BASE_Q, but ipkt.dts is in video_input_stream->time_base + if ( video_input_stream->cur_dts < video_last_dts ) { + Debug(1, "Resetting video_last_dts from (%d) to (%d) p.dts was (%d)", video_last_dts, video_input_stream->cur_dts, ipkt->dts ); + opkt.dts = previous_dts + av_rescale_q(video_input_stream->cur_dts, AV_TIME_BASE_Q, video_output_stream->time_base); + } else { + opkt.dts = previous_dts + av_rescale_q(video_input_stream->cur_dts - video_last_dts, AV_TIME_BASE_Q, video_output_stream->time_base); + } + Debug(3, "opkt.dts = %d from video_input_stream->cur_dts(%d) - previus_dts(%d)", + opkt.dts, video_input_stream->cur_dts, video_last_dts + ); + video_last_dts = video_input_stream->cur_dts; + } else { + if ( ipkt->dts < video_last_dts ) { + Debug(1, "Resetting video_last_dts from (%d) to (%d)", video_last_dts, ipkt->dts ); + opkt.dts = previous_dts + av_rescale_q( ipkt->dts, video_input_stream->time_base, video_output_stream->time_base); + } else { + opkt.dts = previous_dts + av_rescale_q( ipkt->dts - video_last_dts, video_input_stream->time_base, video_output_stream->time_base); + } + Debug(3, "opkt.dts = %d from ipkt.dts(%d) - previus_dts(%d)", + opkt.dts, ipkt->dts, video_last_dts + ); + video_last_dts = ipkt->dts; + } + } + if ( opkt.dts > opkt.pts ) { + Debug( 1, "opkt.dts(%d) must be <= opkt.pts(%d). 
Decompression must happen before presentation.", opkt.dts, opkt.pts ); + opkt.dts = opkt.pts; } - opkt.dts -= ost_tb_start_time; - - opkt.duration = av_rescale_q(ipkt->duration, input_st->time_base, video_st->time_base); + if ( ipkt->duration == AV_NOPTS_VALUE ) { + opkt.duration = av_rescale_q( duration, video_input_stream->time_base, video_output_stream->time_base); + } else { + opkt.duration = av_rescale_q(ipkt->duration, video_input_stream->time_base, video_output_stream->time_base); + } opkt.flags = ipkt->flags; opkt.pos=-1; @@ -274,38 +626,31 @@ int VideoStore::writeVideoFramePacket(AVPacket *ipkt, AVStream *input_st){//, AV opkt.size = ipkt->size; // Some camera have audio on stream 0 and video on stream 1. So when we remove the audio, video stream has to go on 0 - if ( ipkt->stream_index > 0 and ! audio_st ) { + if ( ipkt->stream_index > 0 and ! audio_output_stream ) { Debug(1,"Setting stream index to 0 instead of %d", ipkt->stream_index ); opkt.stream_index = 0; } else { opkt.stream_index = ipkt->stream_index; } - /*opkt.flags |= AV_PKT_FLAG_KEY;*/ - - if (video_st->codec->codec_type == AVMEDIA_TYPE_VIDEO && (fmt->flags & AVFMT_RAWPICTURE)) { - /* store AVPicture in AVPacket, as expected by the output format */ - avpicture_fill(&pict, opkt.data, video_st->codec->pix_fmt, video_st->codec->width, video_st->codec->height); - opkt.data = (uint8_t *)&pict; - opkt.size = sizeof(AVPicture); - opkt.flags |= AV_PKT_FLAG_KEY; - } - + AVPacket safepkt; memcpy(&safepkt, &opkt, sizeof(AVPacket)); +Debug(1, "writing video packet pts(%d) dts(%d) duration(%d)", opkt.pts, opkt.dts, opkt.duration ); if ((opkt.data == NULL)||(opkt.size < 1)) { Warning("%s:%d: Mangled AVPacket: discarding frame", __FILE__, __LINE__ ); + dumpPacket( ipkt); dumpPacket(&opkt); - } else if ((prevDts > 0) && (prevDts >= opkt.dts)) { - Warning("%s:%d: DTS out of order: %lld \u226E %lld; discarding frame", __FILE__, __LINE__, prevDts, opkt.dts); - prevDts = opkt.dts; + } else if ((previous_dts > 0) 
&& (previous_dts > opkt.dts)) { + Warning("%s:%d: DTS out of order: %lld \u226E %lld; discarding frame", __FILE__, __LINE__, previous_dts, opkt.dts); + previous_dts = opkt.dts; dumpPacket(&opkt); } else { - int ret; - prevDts = opkt.dts; // Unsure if av_interleaved_write_frame() clobbers opkt.dts when out of order, so storing in advance + previous_dts = opkt.dts; // Unsure if av_interleaved_write_frame() clobbers opkt.dts when out of order, so storing in advance + previous_pts = opkt.pts; ret = av_interleaved_write_frame(oc, &opkt); if(ret<0){ // There's nothing we can really do if the frame is rejected, just drop it and get on with the next @@ -314,79 +659,218 @@ int VideoStore::writeVideoFramePacket(AVPacket *ipkt, AVStream *input_st){//, AV } } - av_free_packet(&opkt); + zm_av_packet_unref(&opkt); return 0; + } -int VideoStore::writeAudioFramePacket(AVPacket *ipkt, AVStream *input_st){ +int VideoStore::writeAudioFramePacket( AVPacket *ipkt ) { + Debug(4, "writeAudioFrame"); - if(!audio_st) { - Error("Called writeAudioFramePacket when no audio_st"); - return -1;//FIXME -ve return codes do not free packet in ffmpeg_camera at the moment + if(!audio_output_stream) { + Debug(1, "Called writeAudioFramePacket when no audio_output_stream"); + return 0;//FIXME -ve return codes do not free packet in ffmpeg_camera at the moment } - /*if(!keyframeMessage) - return -1;*/ - //zm_dump_stream_format( oc, ipkt->stream_index, 0, 1 ); - // What is this doing? Getting the time of the start of this video chunk? Does that actually make sense? 
- int64_t ost_tb_start_time = av_rescale_q(startTime, AV_TIME_BASE_Q, audio_st->time_base); - AVPacket opkt; + if ( audio_output_codec ) { +#ifdef HAVE_LIBAVRESAMPLE - av_init_packet(&opkt); - Debug(3, "after init packet" ); +#if 0 + ret = avcodec_send_packet( audio_input_context, ipkt ); + if ( ret < 0 ) { + Error("avcodec_send_packet fail %s", av_make_error_string(ret).c_str()); + return 0; + } + ret = avcodec_receive_frame( audio_input_context, input_frame ); + if ( ret < 0 ) { + Error("avcodec_receive_frame fail %s", av_make_error_string(ret).c_str()); + return 0; + } + Debug(2, "Frame: samples(%d), format(%d), sample_rate(%d), channel layout(%d) refd(%d)", + input_frame->nb_samples, + input_frame->format, + input_frame->sample_rate, + input_frame->channel_layout, + audio_output_context->refcounted_frames + ); + + ret = avcodec_send_frame( audio_output_context, input_frame ); + if ( ret < 0 ) { + av_frame_unref( input_frame ); + Error("avcodec_send_frame fail(%d), %s codec is open(%d) is_encoder(%d)", ret, av_make_error_string(ret).c_str(), + avcodec_is_open( audio_output_context ), + av_codec_is_encoder( audio_output_context->codec) + ); + return 0; + } + ret = avcodec_receive_packet( audio_output_context, &opkt ); + if ( ret < 0 ) { + av_frame_unref( input_frame ); + Error("avcodec_receive_packet fail %s", av_make_error_string(ret).c_str()); + return 0; + } + av_frame_unref( input_frame ); +#else + + + /** + * Decode the audio frame stored in the packet. + * The input audio stream decoder is used to do this. + * If we are at the end of the file, pass an empty packet to the decoder + * to flush it. + */ + if ((ret = avcodec_decode_audio4(audio_input_context, input_frame, + &data_present, ipkt)) < 0) { + Error( "Could not decode frame (error '%s')\n", + av_make_error_string(ret).c_str()); + dumpPacket( ipkt ); + av_frame_free( &input_frame ); + zm_av_packet_unref( &opkt ); + return 0; + } + if ( ! 
data_present ) { + Debug(2, "Not ready to transcode a frame yet."); + zm_av_packet_unref(&opkt); + return 0; + } + + int frame_size = input_frame->nb_samples; + Debug(4, "Frame size: %d", frame_size ); + + // Resample the input into the audioSampleBuffer until we proceed the whole decoded data + if ( (ret = avresample_convert( resample_context, + NULL, + 0, + 0, + input_frame->data, + 0, + input_frame->nb_samples )) < 0 ) { + Error( "Could not resample frame (error '%s')\n", + av_make_error_string(ret).c_str()); + return 0; + } + + if ( avresample_available( resample_context ) < output_frame->nb_samples ) { + Debug(1, "No enough samples yet"); + return 0; + } + + // Read a frame audio data from the resample fifo + if ( avresample_read( resample_context, output_frame->data, output_frame->nb_samples ) != output_frame->nb_samples ) { + Warning( "Error reading resampled audio: " ); + return 0; + } + + av_init_packet(&opkt); + Debug(5, "after init packet" ); + + /** Set a timestamp based on the sample rate for the container. */ + //output_frame->pts = av_rescale_q( opkt.pts, audio_output_context->time_base, audio_output_stream->time_base ); + + // convert the packet to the codec timebase from the stream timebase + //Debug(3, "output_frame->pts(%d) best effort(%d)", output_frame->pts, + //av_frame_get_best_effort_timestamp(output_frame) + //); + /** + * Encode the audio frame and store it in the temporary packet. + * The output audio stream encoder is used to do this. + */ +#if LIBAVCODEC_VERSION_CHECK(58, 0, 0, 0, 0) + if (( ret = avcodec_receive_packet( audio_output_context, &opkt )) < 0 ) { +#else + if (( ret = avcodec_encode_audio2( audio_output_context, &opkt, output_frame, &data_present )) < 0) { +#endif + Error( "Could not encode frame (error '%s')", + av_make_error_string(ret).c_str()); + zm_av_packet_unref(&opkt); + return 0; + } + if ( ! 
data_present ) { + Debug(2, "Not ready to output a frame yet."); + zm_av_packet_unref(&opkt); + return 0; + } + +#endif +#endif + } else { + av_init_packet(&opkt); + Debug(5, "after init packet" ); + opkt.data = ipkt->data; + opkt.size = ipkt->size; + } + + // PTS is difficult, because of the buffering of the audio packets in the resampler. So we have to do it once we actually have a packet... //Scale the PTS of the outgoing packet to be the correct time base - if (ipkt->pts != AV_NOPTS_VALUE) { - Debug(3, "Rescaling output pts"); - opkt.pts = av_rescale_q(ipkt->pts-startPts, input_st->time_base, audio_st->time_base) - ost_tb_start_time; + if ( ipkt->pts != AV_NOPTS_VALUE ) { + if ( !audio_last_pts ) { + opkt.pts = 0; + } else { + if ( audio_last_pts > ipkt->pts ) { + Debug(1, "Resetting audeo_start_pts from (%d) to (%d)", audio_last_pts, ipkt->pts ); + } + opkt.pts = previous_pts + av_rescale_q(ipkt->pts - audio_last_pts, audio_input_stream->time_base, audio_output_stream->time_base); + Debug(2, "opkt.pts = %d from ipkt->pts(%d) - last_pts(%d)", opkt.pts, ipkt->pts, audio_last_pts ); + } + audio_last_pts = ipkt->pts; } else { - Debug(3, "Setting output pts to AV_NOPTS_VALUE"); + Debug(2, "opkt.pts = undef"); opkt.pts = AV_NOPTS_VALUE; } //Scale the DTS of the outgoing packet to be the correct time base - if(ipkt->dts == AV_NOPTS_VALUE) { - Debug(4, "ipkt->dts == AV_NOPTS_VALUE %d to %d", AV_NOPTS_VALUE, opkt.dts ); - opkt.dts = av_rescale_q(input_st->cur_dts-startDts, AV_TIME_BASE_Q, audio_st->time_base); - Debug(4, "ipkt->dts == AV_NOPTS_VALUE %d to %d", AV_NOPTS_VALUE, opkt.dts ); + if ( ! audio_last_dts ) { + opkt.dts = 0; } else { - Debug(4, "ipkt->dts != AV_NOPTS_VALUE %d to %d", AV_NOPTS_VALUE, opkt.dts ); - opkt.dts = av_rescale_q(ipkt->dts-startDts, input_st->time_base, audio_st->time_base); - Debug(4, "ipkt->dts != AV_NOPTS_VALUE %d to %d", AV_NOPTS_VALUE, opkt.dts ); + if( ipkt->dts == AV_NOPTS_VALUE ) { + // So if the input has no dts assigned... 
still need an output dts... so we use cur_dts? + + if ( audio_last_dts > audio_input_stream->cur_dts ) { + Debug(1, "Resetting audio_last_pts from (%d) to cur_dts (%d)", audio_last_dts, audio_input_stream->cur_dts ); + opkt.dts = previous_dts + av_rescale_q( audio_input_stream->cur_dts, AV_TIME_BASE_Q, audio_output_stream->time_base); + } else { + opkt.dts = previous_dts + av_rescale_q( audio_input_stream->cur_dts - audio_last_dts, AV_TIME_BASE_Q, audio_output_stream->time_base); + } + audio_last_dts = audio_input_stream->cur_dts; + Debug(2, "opkt.dts = %d from video_input_stream->cur_dts(%d) - last_dts(%d)", opkt.dts, audio_input_stream->cur_dts, audio_last_dts ); + } else { + if ( audio_last_dts > ipkt->dts ) { + Debug(1, "Resetting audio_last_dts from (%d) to (%d)", audio_last_dts, ipkt->dts ); + opkt.dts = previous_dts + av_rescale_q(ipkt->dts, audio_input_stream->time_base, audio_output_stream->time_base); + } else { + opkt.dts = previous_dts + av_rescale_q(ipkt->dts - audio_last_dts, audio_input_stream->time_base, audio_output_stream->time_base); + } + Debug(2, "opkt.dts = %d from ipkt->dts(%d) - last_dts(%d)", opkt.dts, ipkt->dts, audio_last_dts ); + } } - opkt.dts -= ost_tb_start_time; - - // Seems like it would be really weird for the codec type to NOT be audiu - if (audio_st->codec->codec_type == AVMEDIA_TYPE_AUDIO && ipkt->dts != AV_NOPTS_VALUE) { - Debug( 4, "code is audio, dts != AV_NOPTS_VALUE " ); - int duration = av_get_audio_frame_duration(input_st->codec, ipkt->size); - if(!duration) - duration = input_st->codec->frame_size; - - //FIXME where to get filter_in_rescale_delta_last - //FIXME av_rescale_delta doesn't exist in ubuntu vivid libavtools - opkt.dts = opkt.pts = av_rescale_delta(input_st->time_base, ipkt->dts, - (AVRational){1, input_st->codec->sample_rate}, duration, &filter_in_rescale_delta_last, - audio_st->time_base) - ost_tb_start_time; + if ( opkt.dts > opkt.pts ) { + Debug(1,"opkt.dts(%d) must be <= opkt.pts(%d). 
Decompression must happen before presentation.", opkt.dts, opkt.pts ); + opkt.dts = opkt.pts; } - opkt.duration = av_rescale_q(ipkt->duration, input_st->time_base, audio_st->time_base); - opkt.pos=-1; + // I wonder if we could just use duration instead of all the hoop jumping above? + opkt.duration = av_rescale_q(ipkt->duration, audio_input_stream->time_base, audio_output_stream->time_base); + + // pkt.pos: byte position in stream, -1 if unknown + opkt.pos = -1; opkt.flags = ipkt->flags; - - opkt.data = ipkt->data; - opkt.size = ipkt->size; opkt.stream_index = ipkt->stream_index; + Debug(2, "Stream index is %d", opkt.stream_index ); - int ret; + AVPacket safepkt; + memcpy(&safepkt, &opkt, sizeof(AVPacket)); ret = av_interleaved_write_frame(oc, &opkt); if(ret!=0){ - Fatal("Error encoding audio frame packet: %s\n", av_make_error_string(ret).c_str()); + Error("Error writing audio frame packet: %s\n", av_make_error_string(ret).c_str()); + dumpPacket(&safepkt); + } else { + Debug(2,"Success writing audio frame" ); } - Debug(4,"Success writing audio frame" ); - av_free_packet(&opkt); + zm_av_packet_unref(&opkt); return 0; -} +} // end int VideoStore::writeAudioFramePacket( AVPacket *ipkt ) + diff --git a/src/zm_videostore.h b/src/zm_videostore.h index a11973b4a..b76153cb8 100644 --- a/src/zm_videostore.h +++ b/src/zm_videostore.h @@ -2,6 +2,13 @@ #define ZM_VIDEOSTORE_H #include "zm_ffmpeg.h" +extern "C" { +#include "libavutil/audio_fifo.h" + +#ifdef HAVE_LIBAVRESAMPLE +#include "libavresample/avresample.h" +#endif +} #if HAVE_LIBAVCODEC @@ -9,45 +16,67 @@ class VideoStore { private: + unsigned int packets_written; - AVOutputFormat *fmt; + AVOutputFormat *output_format; AVFormatContext *oc; - AVStream *video_st; - AVStream *audio_st; + AVStream *video_output_stream; + AVStream *audio_output_stream; + AVCodecContext *video_output_context; + + AVStream *video_input_stream; + AVStream *audio_input_stream; + + // Move this into the object so that we aren't constantly 
allocating/deallocating it on the stack + AVPacket opkt; + // we are transcoding + AVFrame *input_frame; + AVFrame *output_frame; + + AVCodecContext *video_input_context; + AVCodecContext *audio_input_context; + int ret; + + // The following are used when encoding the audio stream to AAC + AVCodec *audio_output_codec; + AVCodecContext *audio_output_context; + int data_present; + AVAudioFifo *fifo; + int output_frame_size; +#ifdef HAVE_LIBAVRESAMPLE +AVAudioResampleContext* resample_context; +#endif + uint8_t *converted_input_samples = NULL; const char *filename; const char *format; - bool keyframeMessage; - int keyframeSkipNumber; + bool keyframeMessage; + int keyframeSkipNumber; - int64_t startTime; - int64_t startPts; - int64_t startDts; - int64_t prevDts; - int64_t filter_in_rescale_delta_last; + // These are for input + int64_t video_last_pts; + int64_t video_last_dts; + int64_t audio_last_pts; + int64_t audio_last_dts; + + // These are for output, should start at zero. We assume they do not wrap because we just aren't going to save files that big. 
+ int64_t previous_pts; + int64_t previous_dts; + + int64_t filter_in_rescale_delta_last; + + bool setup_resampler(); public: - VideoStore(const char *filename_in, const char *format_in, AVStream *input_st, AVStream *inpaud_st, int64_t nStartTime, Monitor::Orientation p_orientation ); + VideoStore(const char *filename_in, const char *format_in, AVStream *video_input_stream, AVStream *audio_input_stream, int64_t nStartTime, Monitor * p_monitor ); ~VideoStore(); - int writeVideoFramePacket(AVPacket *pkt, AVStream *input_st);//, AVPacket *lastKeyframePkt); - int writeAudioFramePacket(AVPacket *pkt, AVStream *input_st); - void dumpPacket( AVPacket *pkt ); + int writeVideoFramePacket( AVPacket *pkt ); + int writeAudioFramePacket( AVPacket *pkt ); + void dumpPacket( AVPacket *pkt ); }; -/* -class VideoEvent { -public: - VideoEvent(unsigned int eid); - ~VideoEvent(); - - int createEventImage(unsigned int fid, char *&pBuff); - -private: - unsigned int m_eid; -};*/ - #endif //havelibav #endif //zm_videostore_h diff --git a/zoneminder-config.cmake b/zoneminder-config.cmake index 728093881..3b35684b5 100644 --- a/zoneminder-config.cmake +++ b/zoneminder-config.cmake @@ -42,12 +42,6 @@ #cmakedefine HAVE_GNUTLS_GNUTLS_H 1 #cmakedefine HAVE_LIBMYSQLCLIENT 1 #cmakedefine HAVE_MYSQL_H 1 -#cmakedefine HAVE_LIBX264 1 -#cmakedefine HAVE_X264_H 1 -#cmakedefine HAVE_LIBMP4V2 1 -#cmakedefine HAVE_MP4V2_MP4V2_H 1 -#cmakedefine HAVE_MP4V2_H 1 -#cmakedefine HAVE_MP4_H 1 #cmakedefine HAVE_LIBAVFORMAT 1 #cmakedefine HAVE_LIBAVFORMAT_AVFORMAT_H 1 #cmakedefine HAVE_LIBAVCODEC 1 @@ -59,8 +53,16 @@ #cmakedefine HAVE_LIBAVUTIL_MATHEMATICS_H 1 #cmakedefine HAVE_LIBSWSCALE 1 #cmakedefine HAVE_LIBSWSCALE_SWSCALE_H 1 +#cmakedefine HAVE_LIBAVRESAMPLE 1 +#cmakedefine HAVE_LIBAVRESAMPLE_AVRESAMPLE_H 1 #cmakedefine HAVE_LIBVLC 1 #cmakedefine HAVE_VLC_VLC_H 1 +#cmakedefine HAVE_LIBX264 1 +#cmakedefine HAVE_X264_H 1 +#cmakedefine HAVE_LIBMP4V2 1 +#cmakedefine HAVE_MP4_H 1 +#cmakedefine HAVE_MP4V2_H 1 
+#cmakedefine HAVE_MP4V2_MP4V2_H 1 /* Authenication checks */ #cmakedefine HAVE_MD5_OPENSSL 1