Convert NULL/0 to nullptr
The 0 -> nullptr changes in particular should improve readability, since a bare 0 in a pointer context no longer reads as an integer.
parent 9cc71900ba
commit 8f980a1168
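As background for this change (illustrative only, not part of the commit; the handle() overloads below are hypothetical, not ZoneMinder code): unlike NULL or a literal 0, nullptr has its own type, std::nullptr_t, which converts to any pointer type but never to an integer, so pointer intent is explicit at the call site and the compiler can catch the kind of overload ambiguity sketched here.

// Minimal sketch of why nullptr is preferred over NULL/0 in C++11 and later.
#include <cstdio>

void handle(int)          { std::printf("handle(int)\n"); }
void handle(const char *) { std::printf("handle(const char*)\n"); }

int main() {
  handle(0);        // 0 is an int literal, so overload resolution picks handle(int)
  // handle(NULL);  // ambiguous or handle(int), depending on how NULL is defined
  handle(nullptr);  // std::nullptr_t converts only to pointer types: handle(const char*)
  return 0;
}
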
@@ -20,4 +20,4 @@
 #include "zm.h"

 /* This is our argv[0], we need it for backtrace */
-const char* self = 0;
+const char* self = nullptr;

@@ -34,7 +34,7 @@ protected:
 unsigned char *mTail;

 public:
-Buffer() : mStorage( 0 ), mAllocation( 0 ), mSize( 0 ), mHead( 0 ), mTail( 0 ) {
+Buffer() : mStorage( nullptr ), mAllocation( 0 ), mSize( 0 ), mHead( nullptr ), mTail( nullptr ) {
 }
 explicit Buffer( unsigned int pSize ) : mAllocation( pSize ), mSize( 0 ) {
 mHead = mStorage = new unsigned char[mAllocation];

@@ -55,7 +55,7 @@ Camera::Camera(
 Debug(2, "New camera id: %d width: %d line size: %d height: %d colours: %d subpixelorder: %d capture: %d",
 monitor_id, width, linesize, height, colours, subpixelorder, capture);

-monitor = NULL;
+monitor = nullptr;
 }

 Camera::~Camera() {

@@ -142,13 +142,13 @@ SockAddr *SockAddr::newSockAddr( const struct sockaddr &addr, socklen_t len )
 return( new SockAddrUnix( (const struct sockaddr_un *)&addr ) );
 }
 Error( "Unable to create new SockAddr from addr family %d with size %d", addr.sa_family, len );
-return( 0 );
+return nullptr;
 }

 SockAddr *SockAddr::newSockAddr( const SockAddr *addr )
 {
 if ( !addr )
-return( 0 );
+return nullptr;

 if ( addr->getDomain() == AF_INET )
 {
@@ -159,7 +159,7 @@ SockAddr *SockAddr::newSockAddr( const SockAddr *addr )
 return( new SockAddrUnix( *(SockAddrUnix *)addr ) );
 }
 Error( "Unable to create new SockAddr from addr family %d", addr->getDomain() );
-return( 0 );
+return nullptr;
 }

 SockAddrInet::SockAddrInet() : SockAddr( (struct sockaddr *)&mAddrIn )
@@ -170,14 +170,14 @@ bool SockAddrInet::resolve( const char *host, const char *serv, const char *prot
 {
 memset( &mAddrIn, 0, sizeof(mAddrIn) );

-struct hostent *hostent=0;
+struct hostent *hostent=nullptr;
 if ( !(hostent = ::gethostbyname( host ) ) )
 {
 Error( "gethostbyname( %s ), h_errno = %d", host, h_errno );
 return( false );
 }

-struct servent *servent=0;
+struct servent *servent=nullptr;
 if ( !(servent = ::getservbyname( serv, proto ) ) )
 {
 Error( "getservbyname( %s ), errno = %d, error = %s", serv, errno, strerror(errno) );
@@ -195,7 +195,7 @@ bool SockAddrInet::resolve( const char *host, int port, const char *proto )
 {
 memset( &mAddrIn, 0, sizeof(mAddrIn) );

-struct hostent *hostent=0;
+struct hostent *hostent=nullptr;
 if ( !(hostent = ::gethostbyname( host ) ) )
 {
 Error( "gethostbyname( %s ), h_errno = %d", host, h_errno );
@@ -212,7 +212,7 @@ bool SockAddrInet::resolve( const char *serv, const char *proto )
 {
 memset( &mAddrIn, 0, sizeof(mAddrIn) );

-struct servent *servent=0;
+struct servent *servent=nullptr;
 if ( !(servent = ::getservbyname( serv, proto ) ) )
 {
 Error( "getservbyname( %s ), errno = %d, error = %s", serv, errno, strerror(errno) );
@@ -541,7 +541,7 @@ bool InetSocket::connect( const char *host, const char *serv )
 * If socket(2) (or connect(2)) fails, we (close the socket
 * and) try the next address. */

-for (rp = result; rp != NULL; rp = rp->ai_next) {
+for (rp = result; rp != nullptr; rp = rp->ai_next) {
 if (mSd != -1) {
 if (::connect(mSd, rp->ai_addr, rp->ai_addrlen) != -1)
 break; /* Success */
@@ -576,7 +576,7 @@ bool InetSocket::connect( const char *host, const char *serv )

 freeaddrinfo(result); /* No longer needed */

-if (rp == NULL) { /* No address succeeded */
+if (rp == nullptr) { /* No address succeeded */
 Error( "connect(), Could not connect" );
 mAddressFamily = AF_UNSPEC;
 return( false );
@@ -607,9 +607,9 @@ bool InetSocket::bind( const char * host, const char * serv )
 hints.ai_socktype = getType();
 hints.ai_flags = AI_PASSIVE; /* For wildcard IP address */
 hints.ai_protocol = 0; /* Any protocol */
-hints.ai_canonname = NULL;
-hints.ai_addr = NULL;
-hints.ai_next = NULL;
+hints.ai_canonname = nullptr;
+hints.ai_addr = nullptr;
+hints.ai_next = nullptr;

 s = getaddrinfo(host, serv, &hints, &result);
 if (s != 0) {
@@ -621,7 +621,7 @@ bool InetSocket::bind( const char * host, const char * serv )
 * Try each address until we successfully bind(2).
 * If socket(2) (or bind(2)) fails, we (close the socket
 * and) try the next address. */
-for (rp = result; rp != NULL; rp = rp->ai_next) {
+for (rp = result; rp != nullptr; rp = rp->ai_next) {
 memset(&buf, 0, sizeof(buf));
 if (rp->ai_family == AF_INET) {
 inet_ntop(AF_INET, &((struct sockaddr_in *)rp->ai_addr)->sin_addr, buf, sizeof(buf)-1);
@@ -645,7 +645,7 @@ bool InetSocket::bind( const char * host, const char * serv )
 mSd = -1;
 }

-if (rp == NULL) { /* No address succeeded */
+if (rp == nullptr) { /* No address succeeded */
 Error( "bind(), Could not bind" );
 return( false );
 }
@@ -657,7 +657,7 @@ bool InetSocket::bind( const char * host, const char * serv )

 bool InetSocket::bind( const char * serv )
 {
-return bind( NULL, serv);
+return bind( nullptr, serv);
 }

 bool InetSocket::bind( const char * host, int port )
@@ -673,7 +673,7 @@ bool InetSocket::bind( int port )
 char serv[8];
 snprintf(serv, sizeof(serv), "%d", port);

-return bind( NULL, serv );
+return bind( nullptr, serv );
 }

 bool TcpInetServer::listen()
@@ -689,7 +689,7 @@ bool TcpInetServer::accept()
 bool TcpInetServer::accept( TcpInetSocket *&newSocket )
 {
 int newSd = -1;
-newSocket = 0;
+newSocket = nullptr;

 if ( !Socket::accept( newSd ) )
 return( false );
@@ -702,7 +702,7 @@ bool TcpInetServer::accept( TcpInetSocket *&newSocket )
 bool TcpUnixServer::accept( TcpUnixSocket *&newSocket )
 {
 int newSd = -1;
-newSocket = 0;
+newSocket = nullptr;

 if ( !Socket::accept( newSd ) )
 return( false );
@@ -830,7 +830,7 @@ void Select::clearWriters()
 int Select::wait()
 {
 struct timeval tempTimeout = mTimeout;
-struct timeval *selectTimeout = mHasTimeout?&tempTimeout:NULL;
+struct timeval *selectTimeout = mHasTimeout?&tempTimeout:nullptr;

 fd_set rfds;
 fd_set wfds;
@@ -845,7 +845,7 @@ int Select::wait()
 for ( CommsSet::iterator iter = mWriters.begin(); iter != mWriters.end(); ++iter )
 FD_SET((*iter)->getWriteDesc(),&wfds);

-int nFound = select( mMaxFd+1, &rfds, &wfds, NULL, selectTimeout );
+int nFound = select( mMaxFd+1, &rfds, &wfds, nullptr, selectTimeout );
 if( nFound == 0 )
 {
 Debug( 1, "Select timed out" );

@@ -416,8 +416,8 @@ public:
 }

 public:
-virtual int sendto( const void *msg, int len, const SockAddr *addr=0 ) const {
-ssize_t nBytes = ::sendto( mSd, msg, len, 0, addr?addr->getAddr():NULL, addr?addr->getAddrSize():0 );
+virtual int sendto( const void *msg, int len, const SockAddr *addr=nullptr ) const {
+ssize_t nBytes = ::sendto( mSd, msg, len, 0, addr?addr->getAddr():nullptr, addr?addr->getAddrSize():0 );
 if ( nBytes < 0 )
 Debug( 1, "Sendto of %d bytes on sd %d failed: %s", len, mSd, strerror(errno) );
 return( nBytes );
@@ -432,7 +432,7 @@ public:
 Debug( 1, "Recvfrom of %d bytes max on sd %d (with address) failed: %s", len, mSd, strerror(errno) );
 }
 } else {
-nBytes = ::recvfrom( mSd, msg, len, 0, NULL, 0 );
+nBytes = ::recvfrom( mSd, msg, len, 0, nullptr, 0 );
 if ( nBytes < 0 )
 Debug( 1, "Recvfrom of %d bytes max on sd %d (no address) failed: %s", len, mSd, strerror(errno) );
 }

@@ -110,11 +110,11 @@ void zmLoadConfig() {
 void process_configfile(char const *configFile) {
 FILE *cfg;
 char line[512];
-if ( (cfg = fopen(configFile, "r")) == NULL ) {
+if ( (cfg = fopen(configFile, "r")) == nullptr ) {
 Fatal("Can't open %s: %s", configFile, strerror(errno));
 return;
 }
-while ( fgets(line, sizeof(line), cfg) != NULL ) {
+while ( fgets(line, sizeof(line), cfg) != nullptr ) {
 char *line_ptr = line;

 // Trim off any cr/lf line endings
@@ -259,16 +259,16 @@ ConfigItem::~ConfigItem() {
 void ConfigItem::ConvertValue() const {
 if ( !strcmp( type, "boolean" ) ) {
 cfg_type = CFG_BOOLEAN;
-cfg_value.boolean_value = (bool)strtol(value, 0, 0);
+cfg_value.boolean_value = (bool)strtol(value, nullptr, 0);
 } else if ( !strcmp(type, "integer") ) {
 cfg_type = CFG_INTEGER;
-cfg_value.integer_value = strtol(value, 0, 10);
+cfg_value.integer_value = strtol(value, nullptr, 10);
 } else if ( !strcmp(type, "hexadecimal") ) {
 cfg_type = CFG_INTEGER;
-cfg_value.integer_value = strtol(value, 0, 16);
+cfg_value.integer_value = strtol(value, nullptr, 16);
 } else if ( !strcmp(type, "decimal") ) {
 cfg_type = CFG_DECIMAL;
-cfg_value.decimal_value = strtod(value, 0);
+cfg_value.decimal_value = strtod(value, nullptr);
 } else {
 cfg_type = CFG_STRING;
 cfg_value.string_value = value;
@@ -333,10 +333,10 @@ Config::~Config() {
 if ( items ) {
 for ( int i = 0; i < n_items; i++ ) {
 delete items[i];
-items[i] = NULL;
+items[i] = nullptr;
 }
 delete[] items;
-items = NULL;
+items = nullptr;
 }
 }

@@ -106,26 +106,26 @@ void cURLCamera::Initialise() {
 Debug(2,"libcurl version: %s", (*curl_version_f)());

 /* Create the shared data mutex */
-int nRet = pthread_mutex_init(&shareddata_mutex, NULL);
+int nRet = pthread_mutex_init(&shareddata_mutex, nullptr);
 if(nRet != 0) {
 Error("Shared data mutex creation failed: %s",strerror(nRet));
 return;
 }
 /* Create the data available condition variable */
-nRet = pthread_cond_init(&data_available_cond, NULL);
+nRet = pthread_cond_init(&data_available_cond, nullptr);
 if(nRet != 0) {
 Error("Data available condition variable creation failed: %s",strerror(nRet));
 return;
 }
 /* Create the request complete condition variable */
-nRet = pthread_cond_init(&request_complete_cond, NULL);
+nRet = pthread_cond_init(&request_complete_cond, nullptr);
 if(nRet != 0) {
 Error("Request complete condition variable creation failed: %s",strerror(nRet));
 return;
 }

 /* Create the thread */
-nRet = pthread_create(&thread, NULL, thread_func_dispatcher, this);
+nRet = pthread_create(&thread, nullptr, thread_func_dispatcher, this);
 if(nRet != 0) {
 Error("Thread creation failed: %s",strerror(nRet));
 return;
@@ -137,7 +137,7 @@ void cURLCamera::Terminate() {
 bTerminate = true;

 /* Wait for thread termination */
-pthread_join(thread, NULL);
+pthread_join(thread, nullptr);

 /* Destroy condition variables */
 pthread_cond_destroy(&request_complete_cond);
@@ -425,7 +425,7 @@ void* cURLCamera::thread_func() {
 double dSize;

 c = (*curl_easy_init_f)();
-if(c == NULL) {
+if(c == nullptr) {
 dlclose(curl_lib);
 Error("Failed getting easy handle from libcurl");
 tRet = -51;
@@ -572,7 +572,7 @@ void* cURLCamera::thread_func() {

 /* Cleanup */
 (*curl_easy_cleanup_f)(c);
-c = NULL;
+c = nullptr;

 return (void*)tRet;
 }

@@ -48,10 +48,10 @@ bool zmDbConnect() {
 staticConfig.DB_SSL_CLIENT_KEY.c_str(),
 staticConfig.DB_SSL_CLIENT_CERT.c_str(),
 staticConfig.DB_SSL_CA_CERT.c_str(),
-NULL, NULL);
+nullptr, nullptr);
 std::string::size_type colonIndex = staticConfig.DB_HOST.find(":");
 if ( colonIndex == std::string::npos ) {
-if ( !mysql_real_connect(&dbconn, staticConfig.DB_HOST.c_str(), staticConfig.DB_USER.c_str(), staticConfig.DB_PASS.c_str(), NULL, 0, NULL, 0) ) {
+if ( !mysql_real_connect(&dbconn, staticConfig.DB_HOST.c_str(), staticConfig.DB_USER.c_str(), staticConfig.DB_PASS.c_str(), nullptr, 0, nullptr, 0) ) {
 Error( "Can't connect to server: %s", mysql_error(&dbconn));
 return false;
 }
@@ -59,12 +59,12 @@ bool zmDbConnect() {
 std::string dbHost = staticConfig.DB_HOST.substr( 0, colonIndex );
 std::string dbPortOrSocket = staticConfig.DB_HOST.substr( colonIndex+1 );
 if ( dbPortOrSocket[0] == '/' ) {
-if ( !mysql_real_connect(&dbconn, NULL, staticConfig.DB_USER.c_str(), staticConfig.DB_PASS.c_str(), NULL, 0, dbPortOrSocket.c_str(), 0) ) {
+if ( !mysql_real_connect(&dbconn, nullptr, staticConfig.DB_USER.c_str(), staticConfig.DB_PASS.c_str(), nullptr, 0, dbPortOrSocket.c_str(), 0) ) {
 Error("Can't connect to server: %s", mysql_error(&dbconn));
 return false;
 }
 } else {
-if ( !mysql_real_connect( &dbconn, dbHost.c_str(), staticConfig.DB_USER.c_str(), staticConfig.DB_PASS.c_str(), NULL, atoi(dbPortOrSocket.c_str()), NULL, 0 ) ) {
+if ( !mysql_real_connect( &dbconn, dbHost.c_str(), staticConfig.DB_USER.c_str(), staticConfig.DB_PASS.c_str(), nullptr, atoi(dbPortOrSocket.c_str()), nullptr, 0 ) ) {
 Error( "Can't connect to server: %s", mysql_error( &dbconn ) );
 return false;
 }
@@ -94,20 +94,20 @@ void zmDbClose() {
 MYSQL_RES * zmDbFetch(const char * query) {
 if ( !zmDbConnected ) {
 Error("Not connected.");
-return NULL;
+return nullptr;
 }
 db_mutex.lock();
 // Might have been disconnected while we waited for the lock
 if ( !zmDbConnected ) {
 db_mutex.unlock();
 Error("Not connected.");
-return NULL;
+return nullptr;
 }

 if ( mysql_query(&dbconn, query) ) {
 db_mutex.unlock();
 Error("Can't run query: %s", mysql_error(&dbconn));
-return NULL;
+return nullptr;
 }
 Debug(4, "Success running query: %s", query);
 MYSQL_RES *result = mysql_store_result(&dbconn);
@@ -124,7 +124,7 @@ zmDbRow *zmDbFetchOne(const char *query) {
 return row;
 }
 delete row;
-return NULL;
+return nullptr;
 }

 MYSQL_RES *zmDbRow::fetch(const char *query) {
@@ -135,14 +135,14 @@ MYSQL_RES *zmDbRow::fetch(const char *query) {
 if ( n_rows != 1 ) {
 Error("Bogus number of lines return from query, %d returned for query %s.", n_rows, query);
 mysql_free_result(result_set);
-result_set = NULL;
+result_set = nullptr;
 return result_set;
 }

 row = mysql_fetch_row(result_set);
 if ( ! row ) {
 mysql_free_result(result_set);
-result_set = NULL;
+result_set = nullptr;
 Error("Error getting row from query %s. Error is %s", query, mysql_error(&dbconn));
 } else {
 Debug(5, "Success");
@@ -153,6 +153,6 @@ MYSQL_RES *zmDbRow::fetch(const char *query) {
 zmDbRow::~zmDbRow() {
 if ( result_set ) {
 mysql_free_result(result_set);
-result_set = NULL;
+result_set = nullptr;
 }
 }

@@ -28,7 +28,7 @@ class zmDbRow {
 MYSQL_RES *result_set;
 MYSQL_ROW row;
 public:
-zmDbRow() { result_set = NULL; row = NULL; };
+zmDbRow() { result_set = nullptr; row = nullptr; };
 MYSQL_RES *fetch( const char *query );
 zmDbRow( MYSQL_RES *, MYSQL_ROW *row );
 ~zmDbRow();

@@ -55,7 +55,7 @@ Event::Event(
 cause(p_cause),
 noteSetMap(p_noteSetMap),
 videoEvent(p_videoEvent),
-videowriter(NULL)
+videowriter(nullptr)
 {

 std::string notes;
@@ -222,19 +222,19 @@ Event::Event(
 Error("ZoneMinder was not compiled with the X264 MP4 video writer, check dependencies (x264 and mp4v2)");
 #endif

-if ( videowriter != NULL ) {
+if ( videowriter != nullptr ) {
 /* Open the video stream */
 int nRet = videowriter->Open();
 if ( nRet != 0 ) {
 Error("Failed opening video stream");
 delete videowriter;
-videowriter = NULL;
+videowriter = nullptr;
 }
 }
 }
 } else {
 /* No video object */
-videowriter = NULL;
+videowriter = nullptr;
 }
 } // Event::Event( Monitor *p_monitor, struct timeval p_start_time, const std::string &p_cause, const StringSetMap &p_noteSetMap, bool p_videoEvent )

@@ -242,13 +242,13 @@ Event::~Event() {
 // We close the videowriter first, because if we finish the event, we might try to view the file, but we aren't done writing it yet.

 /* Close the video file */
-if ( videowriter != NULL ) {
+if ( videowriter != nullptr ) {
 int nRet = videowriter->Close();
 if ( nRet != 0 ) {
 Error("Failed closing video stream");
 }
 delete videowriter;
-videowriter = NULL;
+videowriter = nullptr;
 }

 struct DeltaTimeval delta_time;
@@ -347,7 +347,7 @@ bool Event::WriteFrameVideo(
 Image ts_image;

 /* Checking for invalid parameters */
-if ( videow == NULL ) {
+if ( videow == nullptr ) {
 Error("NULL Video object");
 return false;
 }
@@ -509,7 +509,7 @@ void Event::AddFramesInternal(int n_frames, int start_frame, Image **images, str
 WriteFrameImage(images[i], *(timestamps[i]), snapshot_file.c_str());
 }

-if ( videowriter != NULL ) {
+if ( videowriter != nullptr ) {
 WriteFrameVideo(images[i], *(timestamps[i]), videowriter);
 }

@@ -646,7 +646,7 @@ void Event::AddFrame(Image *image, struct timeval timestamp, int score, Image *a
 }
 } // end if frame_type == ALARM

-if ( videowriter != NULL ) {
+if ( videowriter != nullptr ) {
 WriteFrameVideo(image, timestamp, videowriter);
 }

@@ -129,7 +129,7 @@ class Event {
 void updateNotes( const StringSetMap &stringSetMap );

 void AddFrames( int n_frames, Image **images, struct timeval **timestamps );
-void AddFrame( Image *image, struct timeval timestamp, int score=0, Image *alarm_frame=NULL );
+void AddFrame( Image *image, struct timeval timestamp, int score=0, Image *alarm_frame=nullptr );

 private:
 void AddFramesInternal( int n_frames, int start_frame, Image **images, struct timeval **timestamps );
@@ -158,16 +158,16 @@ class Event {
 while ( pre_alarm_count > 0 ) {
 int i = pre_alarm_count - 1;
 delete pre_alarm_data[i].image;
-pre_alarm_data[i].image = NULL;
+pre_alarm_data[i].image = nullptr;
 if ( pre_alarm_data[i].alarm_frame ) {
 delete pre_alarm_data[i].alarm_frame;
-pre_alarm_data[i].alarm_frame = NULL;
+pre_alarm_data[i].alarm_frame = nullptr;
 }
 pre_alarm_count--;
 }
 pre_alarm_count = 0;
 }
-static void AddPreAlarmFrame(Image *image, struct timeval timestamp, int score=0, Image *alarm_frame=NULL) {
+static void AddPreAlarmFrame(Image *image, struct timeval timestamp, int score=0, Image *alarm_frame=nullptr) {
 pre_alarm_data[pre_alarm_count].image = new Image(*image);
 pre_alarm_data[pre_alarm_count].timestamp = timestamp;
 pre_alarm_data[pre_alarm_count].score = score;

@@ -148,7 +148,7 @@ bool EventStream::loadEventData(uint64_t event_id) {

 event_data->monitor_id = atoi(dbrow[0]);
 event_data->storage_id = dbrow[1] ? atoi(dbrow[1]) : 0;
-event_data->frame_count = dbrow[2] == NULL ? 0 : atoi(dbrow[2]);
+event_data->frame_count = dbrow[2] == nullptr ? 0 : atoi(dbrow[2]);
 event_data->start_time = atoi(dbrow[3]);
 event_data->duration = dbrow[4] ? atof(dbrow[4]) : 0.0;
 strncpy(event_data->video_file, dbrow[5], sizeof(event_data->video_file)-1);
@@ -160,8 +160,8 @@ bool EventStream::loadEventData(uint64_t event_id) {
 } else {
 event_data->scheme = Storage::SHALLOW;
 }
-event_data->SaveJPEGs = dbrow[7] == NULL ? 0 : atoi(dbrow[7]);
-event_data->Orientation = (Monitor::Orientation)(dbrow[8] == NULL ? 0 : atoi(dbrow[8]));
+event_data->SaveJPEGs = dbrow[7] == nullptr ? 0 : atoi(dbrow[7]);
+event_data->Orientation = (Monitor::Orientation)(dbrow[8] == nullptr ? 0 : atoi(dbrow[8]));
 mysql_free_result(result);

 if ( !monitor ) {
@@ -303,7 +303,7 @@ bool EventStream::loadEventData(uint64_t event_id) {
 if ( 0 > ffmpeg_input->Open(filepath.c_str()) ) {
 Warning("Unable to open ffmpeg_input %s", filepath.c_str());
 delete ffmpeg_input;
-ffmpeg_input = NULL;
+ffmpeg_input = nullptr;
 }
 }

@@ -646,7 +646,7 @@ bool EventStream::sendFrame(int delta_us) {

 static char filepath[PATH_MAX];
 static struct stat filestat;
-FILE *fdj = NULL;
+FILE *fdj = nullptr;

 // This needs to be abstracted. If we are saving jpgs, then load the capture file.
 // If we are only saving analysis frames, then send that.
@@ -711,7 +711,7 @@ bool EventStream::sendFrame(int delta_us) {
 img_buffer_size = fread(img_buffer, 1, sizeof(temp_img_buffer), fdj);
 #endif
 } else {
-Image *image = NULL;
+Image *image = nullptr;

 if ( filepath[0] ) {
 image = new Image(filepath);
@@ -790,7 +790,7 @@ bool EventStream::sendFrame(int delta_us) {
 break;
 }
 delete image;
-image = NULL;
+image = nullptr;
 } // end if send_raw or not

 switch ( type ) {
@@ -870,7 +870,7 @@ void EventStream::runStream() {

 Debug(3, "frame rate is: (%f)", (double)event_data->frame_count/event_data->duration);
 updateFrameRate((double)event_data->frame_count/event_data->duration);
-gettimeofday(&start, NULL);
+gettimeofday(&start, nullptr);
 uint64_t start_usec = start.tv_sec * 1000000 + start.tv_usec;
 uint64_t last_frame_offset = 0;

@@ -878,7 +878,7 @@ void EventStream::runStream() {
 double time_to_event = 0;

 while ( !zm_terminate ) {
-gettimeofday(&now, NULL);
+gettimeofday(&now, nullptr);

 int delta_us = 0;
 send_frame = false;
@@ -982,7 +982,7 @@ void EventStream::runStream() {
 // +/- 1? What if we are skipping frames?
 curr_frame_id += (replay_rate>0) ? frame_mod : -1*frame_mod;
 // sending the frame may have taken some time, so reload now
-gettimeofday(&now, NULL);
+gettimeofday(&now, nullptr);
 uint64_t now_usec = (now.tv_sec * 1000000 + now.tv_usec);

 // we incremented by replay_rate, so might have jumped past frame_count

@@ -97,33 +97,33 @@ class EventStream : public StreamBase {
 curr_frame_id(0),
 curr_stream_time(0.0),
 send_frame(false),
-event_data(0),
-storage(NULL),
-ffmpeg_input(NULL),
+event_data(nullptr),
+storage(nullptr),
+ffmpeg_input(nullptr),
 // Used when loading frames from an mp4
-input_codec_context(0),
-input_codec(0)
+input_codec_context(nullptr),
+input_codec(nullptr)
 {}
 ~EventStream() {
 if ( event_data ) {
 if ( event_data->frames ) {
 delete[] event_data->frames;
-event_data->frames = NULL;
+event_data->frames = nullptr;
 }
 delete event_data;
-event_data = NULL;
+event_data = nullptr;
 }
 if ( monitor ) {
 delete monitor;
-monitor = NULL;
+monitor = nullptr;
 }
 if ( storage ) {
 delete storage;
-storage = NULL;
+storage = nullptr;
 }
 if ( ffmpeg_input ) {
 delete ffmpeg_input;
-ffmpeg_input = NULL;
+ffmpeg_input = nullptr;
 }
 }
 void setStreamStart( uint64_t init_event_id, unsigned int init_frame_id );

@@ -151,7 +151,7 @@ static int parse_key_value_pair(AVDictionary **pm, const char **buf,
 int flags)
 {
 char *key = av_get_token(buf, key_val_sep);
-char *val = NULL;
+char *val = nullptr;
 int ret;

 if (key && *key && strspn(*buf, key_val_sep)) {
@@ -225,7 +225,7 @@ int hacked_up_context2_for_older_ffmpeg(AVFormatContext **avctx, AVOutputFormat
 AVFormatContext *s = avformat_alloc_context();
 int ret = 0;

-*avctx = NULL;
+*avctx = nullptr;
 if (!s) {
 av_log(s, AV_LOG_ERROR, "Out of memory\n");
 ret = AVERROR(ENOMEM);
@@ -234,13 +234,13 @@ int hacked_up_context2_for_older_ffmpeg(AVFormatContext **avctx, AVOutputFormat

 if (!oformat) {
 if (format) {
-oformat = av_guess_format(format, NULL, NULL);
+oformat = av_guess_format(format, nullptr, nullptr);
 if (!oformat) {
 av_log(s, AV_LOG_ERROR, "Requested output format '%s' is not a suitable output format\n", format);
 ret = AVERROR(EINVAL);
 }
 } else {
-oformat = av_guess_format(NULL, filename, NULL);
+oformat = av_guess_format(nullptr, filename, nullptr);
 if (!oformat) {
 ret = AVERROR(EINVAL);
 av_log(s, AV_LOG_ERROR, "Unable to find a suitable output format for '%s'\n", filename);
@@ -267,7 +267,7 @@ int hacked_up_context2_for_older_ffmpeg(AVFormatContext **avctx, AVOutputFormat
 ret = AVERROR(ENOMEM);
 return ret;
 }
-s->priv_data = NULL;
+s->priv_data = nullptr;
 }
 #endif

@@ -335,7 +335,7 @@ void zm_dump_stream_format(AVFormatContext *ic, int i, int index, int is_output)
 Debug(1, "Dumping stream index i(%d) index(%d)", i, index );
 int flags = (is_output ? ic->oformat->flags : ic->iformat->flags);
 AVStream *st = ic->streams[i];
-AVDictionaryEntry *lang = av_dict_get(st->metadata, "language", NULL, 0);
+AVDictionaryEntry *lang = av_dict_get(st->metadata, "language", nullptr, 0);
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
 AVCodecParameters *codec = st->codecpar;
 #else
@@ -533,7 +533,7 @@ int zm_receive_packet(AVCodecContext *context, AVPacket &packet) {
 return 1;
 #else
 int got_packet = 0;
-int ret = avcodec_encode_audio2(context, &packet, NULL, &got_packet);
+int ret = avcodec_encode_audio2(context, &packet, nullptr, &got_packet);
 if ( ret < 0 ) {
 Error("Error encoding (%d) (%s)", ret, av_err2str(ret));
 }
@@ -719,7 +719,7 @@ int zm_resample_audio(
 Error("Flushing resampler not supported by AVRESAMPLE");
 return 0;
 }
-int ret = avresample_convert(resample_ctx, NULL, 0, 0, in_frame->data,
+int ret = avresample_convert(resample_ctx, nullptr, 0, 0, in_frame->data,
 0, in_frame->nb_samples);
 if ( ret < 0 ) {
 Error("Could not resample frame (error '%s')",

@@ -133,32 +133,32 @@ FfmpegCamera::FfmpegCamera(
 Initialise();
 }

-mFormatContext = NULL;
+mFormatContext = nullptr;
 mVideoStreamId = -1;
 mAudioStreamId = -1;
-mVideoCodecContext = NULL;
-mAudioCodecContext = NULL;
-mVideoCodec = NULL;
-mAudioCodec = NULL;
-mRawFrame = NULL;
-mFrame = NULL;
+mVideoCodecContext = nullptr;
+mAudioCodecContext = nullptr;
+mVideoCodec = nullptr;
+mAudioCodec = nullptr;
+mRawFrame = nullptr;
+mFrame = nullptr;
 frameCount = 0;
 mCanCapture = false;
-videoStore = NULL;
+videoStore = nullptr;
 have_video_keyframe = false;
-packetqueue = NULL;
+packetqueue = nullptr;
 error_count = 0;
 use_hwaccel = true;
 #if HAVE_LIBAVUTIL_HWCONTEXT_H
-hwFrame = NULL;
-hw_device_ctx = NULL;
+hwFrame = nullptr;
+hw_device_ctx = nullptr;
 #if LIBAVCODEC_VERSION_CHECK(57, 89, 0, 89, 0)
 hw_pix_fmt = AV_PIX_FMT_NONE;
 #endif
 #endif

 #if HAVE_LIBSWSCALE
-mConvertContext = NULL;
+mConvertContext = nullptr;
 #endif
 /* Has to be located inside the constructor so other components such as zma
 * will receive correct colours and subpixel order */
@@ -175,7 +175,7 @@ FfmpegCamera::FfmpegCamera(
 Panic("Unexpected colours: %d", colours);
 }

-frame_buffer = NULL;
+frame_buffer = nullptr;
 // sws_scale needs 32bit aligned width and an extra 16 bytes padding, so recalculate imagesize, which was width*height*bytes_per_pixel
 #if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0)
 alignment = 32;
@@ -351,10 +351,10 @@ int FfmpegCamera::OpenFfmpeg() {

 // Open the input, not necessarily a file
 #if !LIBAVFORMAT_VERSION_CHECK(53, 2, 0, 4, 0)
-if ( av_open_input_file(&mFormatContext, mPath.c_str(), NULL, 0, NULL) != 0 )
+if ( av_open_input_file(&mFormatContext, mPath.c_str(), nullptr, 0, nullptr) != 0 )
 #else
 // Handle options
-AVDictionary *opts = 0;
+AVDictionary *opts = nullptr;
 ret = av_dict_parse_string(&opts, Options().c_str(), "=", ",", 0);
 if ( ret < 0 ) {
 Warning("Could not parse ffmpeg input options '%s'", Options().c_str());
@@ -392,7 +392,7 @@ int FfmpegCamera::OpenFfmpeg() {
 mFormatContext->interrupt_callback.callback = FfmpegInterruptCallback;
 mFormatContext->interrupt_callback.opaque = this;

-ret = avformat_open_input(&mFormatContext, mPath.c_str(), NULL, &opts);
+ret = avformat_open_input(&mFormatContext, mPath.c_str(), nullptr, &opts);
 if ( ret != 0 )
 #endif
 {
@@ -403,15 +403,15 @@ int FfmpegCamera::OpenFfmpeg() {
 #else
 if ( mFormatContext ) {
 avformat_close_input(&mFormatContext);
-mFormatContext = NULL;
+mFormatContext = nullptr;
 }
 #endif
 av_dict_free(&opts);

 return -1;
 }
-AVDictionaryEntry *e = NULL;
-while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != NULL ) {
+AVDictionaryEntry *e = nullptr;
+while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != nullptr ) {
 Warning("Option %s not recognized by ffmpeg", e->key);
 }
 av_dict_free(&opts);
@@ -419,7 +419,7 @@ int FfmpegCamera::OpenFfmpeg() {
 #if !LIBAVFORMAT_VERSION_CHECK(53, 6, 0, 6, 0)
 ret = av_find_stream_info(mFormatContext);
 #else
-ret = avformat_find_stream_info(mFormatContext, 0);
+ret = avformat_find_stream_info(mFormatContext, nullptr);
 #endif
 if ( ret < 0 ) {
 Error("Unable to find stream info from %s due to: %s",
@@ -474,7 +474,7 @@ int FfmpegCamera::OpenFfmpeg() {
 #endif

 if ( mVideoCodecContext->codec_id == AV_CODEC_ID_H264 ) {
-if ( (mVideoCodec = avcodec_find_decoder_by_name("h264_mmal")) == NULL ) {
+if ( (mVideoCodec = avcodec_find_decoder_by_name("h264_mmal")) == nullptr ) {
 Debug(1, "Failed to find decoder (h264_mmal)");
 } else {
 Debug(1, "Success finding decoder (h264_mmal)");
@@ -540,7 +540,7 @@ int FfmpegCamera::OpenFfmpeg() {
 hw_pix_fmt, av_get_pix_fmt_name(hw_pix_fmt));

 ret = av_hwdevice_ctx_create(&hw_device_ctx, type,
-(hwaccel_device != "" ? hwaccel_device.c_str(): NULL), NULL, 0);
+(hwaccel_device != "" ? hwaccel_device.c_str(): nullptr), nullptr, 0);
 if ( ret < 0 ) {
 Error("Failed to create hwaccel device. %s",av_make_error_string(ret).c_str());
 hw_pix_fmt = AV_PIX_FMT_NONE;
@@ -567,8 +567,8 @@ int FfmpegCamera::OpenFfmpeg() {
 #else
 ret = avcodec_open2(mVideoCodecContext, mVideoCodec, &opts);
 #endif
-e = NULL;
-while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != NULL ) {
+e = nullptr;
+while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != nullptr ) {
 Warning("Option %s not recognized by ffmpeg", e->key);
 }
 if ( ret < 0 ) {
@@ -587,7 +587,7 @@ int FfmpegCamera::OpenFfmpeg() {
 #else
 mFormatContext->streams[mAudioStreamId]->codec->codec_id
 #endif
-)) == NULL ) {
+)) == nullptr ) {
 Debug(1, "Can't find codec for audio stream from %s", mPath.c_str());
 } else {
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
@@ -606,7 +606,7 @@ int FfmpegCamera::OpenFfmpeg() {
 #if !LIBAVFORMAT_VERSION_CHECK(53, 8, 0, 8, 0)
 if ( avcodec_open(mAudioCodecContext, mAudioCodec) < 0 ) {
 #else
-if ( avcodec_open2(mAudioCodecContext, mAudioCodec, 0) < 0 ) {
+if ( avcodec_open2(mAudioCodecContext, mAudioCodec, nullptr) < 0 ) {
 #endif
 Error("Unable to open codec for audio stream from %s", mPath.c_str());
 return -1;
@@ -621,7 +621,7 @@ int FfmpegCamera::OpenFfmpeg() {
 // Allocate space for the converted video frame
 mFrame = zm_av_frame_alloc();

-if ( mRawFrame == NULL || mFrame == NULL ) {
+if ( mRawFrame == nullptr || mFrame == nullptr ) {
 Error("Unable to allocate frame for %s", mPath.c_str());
 return -1;
 }
@@ -670,29 +670,29 @@ int FfmpegCamera::Close() {

 if ( mFrame ) {
 av_frame_free(&mFrame);
-mFrame = NULL;
+mFrame = nullptr;
 }
 if ( mRawFrame ) {
 av_frame_free(&mRawFrame);
-mRawFrame = NULL;
+mRawFrame = nullptr;
 }
 #if HAVE_LIBAVUTIL_HWCONTEXT_H
 if ( hwFrame ) {
 av_frame_free(&hwFrame);
-hwFrame = NULL;
+hwFrame = nullptr;
 }
 #endif

 #if HAVE_LIBSWSCALE
 if ( mConvertContext ) {
 sws_freeContext(mConvertContext);
-mConvertContext = NULL;
+mConvertContext = nullptr;
 }
 #endif

 if ( videoStore ) {
 delete videoStore;
-videoStore = NULL;
+videoStore = nullptr;
 }

 if ( mVideoCodecContext ) {
@@ -700,14 +700,14 @@ int FfmpegCamera::Close() {
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
 // avcodec_free_context(&mVideoCodecContext);
 #endif
-mVideoCodecContext = NULL; // Freed by av_close_input_file
+mVideoCodecContext = nullptr; // Freed by av_close_input_file
 }
 if ( mAudioCodecContext ) {
 avcodec_close(mAudioCodecContext);
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
 avcodec_free_context(&mAudioCodecContext);
 #endif
-mAudioCodecContext = NULL; // Freed by av_close_input_file
+mAudioCodecContext = nullptr; // Freed by av_close_input_file
 }

 #if HAVE_LIBAVUTIL_HWCONTEXT_H
@@ -722,12 +722,12 @@ int FfmpegCamera::Close() {
 #else
 avformat_close_input(&mFormatContext);
 #endif
-mFormatContext = NULL;
+mFormatContext = nullptr;
 }

 if ( packetqueue ) {
 delete packetqueue;
-packetqueue = NULL;
+packetqueue = nullptr;
 }

 return 0;
@@ -823,7 +823,7 @@ int FfmpegCamera::CaptureAndRecord(
 } // end if video

 delete videoStore;
-videoStore = NULL;
+videoStore = nullptr;
 have_video_keyframe = false;

 monitor->SetVideoWriterEventId(0);
@@ -839,7 +839,7 @@ int FfmpegCamera::CaptureAndRecord(
 Debug(3, "Record Audio on but no audio stream found");
 videoStore = new VideoStore((const char *) event_file, "mp4",
 mFormatContext->streams[mVideoStreamId],
-NULL,
+nullptr,
 this->getMonitor());

 } else {
@@ -855,13 +855,13 @@ int FfmpegCamera::CaptureAndRecord(
 }
 videoStore = new VideoStore((const char *) event_file, "mp4",
 mFormatContext->streams[mVideoStreamId],
-NULL,
+nullptr,
 this->getMonitor());
 } // end if record_audio

 if ( !videoStore->open() ) {
 delete videoStore;
-videoStore = NULL;
+videoStore = nullptr;

 } else {
 monitor->SetVideoWriterEventId(last_event_id);
@@ -920,7 +920,7 @@ int FfmpegCamera::CaptureAndRecord(
 if ( videoStore ) {
 Debug(1, "Deleting videoStore instance");
 delete videoStore;
-videoStore = NULL;
+videoStore = nullptr;
 have_video_keyframe = false;
 monitor->SetVideoWriterEventId(0);
 }
@@ -1091,7 +1091,7 @@ int FfmpegCamera::transfer_to_image(

 /* Request a writeable buffer of the target image */
 image_buffer = image.WriteBuffer(width, height, colours, subpixelorder);
-if ( image_buffer == NULL ) {
+if ( image_buffer == nullptr ) {
 Error("Failed requesting writeable buffer for the captured image.");
 return -1;
 }
@@ -1128,9 +1128,9 @@ int FfmpegCamera::transfer_to_image(
 input_frame->height,
 (AVPixelFormat)input_frame->format,
 width, height,
-imagePixFormat, SWS_BICUBIC, NULL,
-NULL, NULL);
-if ( mConvertContext == NULL ) {
+imagePixFormat, SWS_BICUBIC, nullptr,
+nullptr, nullptr);
+if ( mConvertContext == nullptr ) {
 Error("Unable to create conversion context for %s from %s to %s",
 mPath.c_str(),
 av_get_pix_fmt_name((AVPixelFormat)input_frame->format),

@@ -64,7 +64,7 @@ class FfmpegCamera : public Camera {
 AVFrame *hwFrame; // Will also be used to indicate if hwaccel is in use
 bool use_hwaccel; //will default to on if hwaccel specified, will get turned off if there is a failure
 #if HAVE_LIBAVUTIL_HWCONTEXT_H
-AVBufferRef *hw_device_ctx = NULL;
+AVBufferRef *hw_device_ctx = nullptr;
 #endif

 // Used to store the incoming packet, it will get copied when queued.

@@ -4,12 +4,12 @@
 #include "zm_ffmpeg.h"

 FFmpeg_Input::FFmpeg_Input() {
-input_format_context = NULL;
+input_format_context = nullptr;
 video_stream_id = -1;
 audio_stream_id = -1;
 FFMPEGInit();
-streams = NULL;
-frame = NULL;
+streams = nullptr;
+frame = nullptr;
 last_seek_request = -1;
 }

@@ -17,14 +17,14 @@ FFmpeg_Input::~FFmpeg_Input() {
 if ( streams ) {
 for ( unsigned int i = 0; i < input_format_context->nb_streams; i += 1 ) {
 avcodec_close(streams[i].context);
-streams[i].context = NULL;
+streams[i].context = nullptr;
 }
 delete[] streams;
-streams = NULL;
+streams = nullptr;
 }
 if ( frame ) {
 av_frame_free(&frame);
-frame = NULL;
+frame = nullptr;
 }
 if ( input_format_context ) {
 #if !LIBAVFORMAT_VERSION_CHECK(53, 17, 0, 25, 0)
@@ -32,7 +32,7 @@ FFmpeg_Input::~FFmpeg_Input() {
 #else
 avformat_close_input(&input_format_context);
 #endif
-input_format_context = NULL;
+input_format_context = nullptr;
 }
 } // end ~FFmpeg_Input()

@@ -41,16 +41,16 @@ int FFmpeg_Input::Open(const char *filepath) {
 int error;

 /** Open the input file to read from it. */
-error = avformat_open_input(&input_format_context, filepath, NULL, NULL);
+error = avformat_open_input(&input_format_context, filepath, nullptr, nullptr);
 if ( error < 0 ) {
 Error("Could not open input file '%s' (error '%s')\n",
 filepath, av_make_error_string(error).c_str() );
-input_format_context = NULL;
+input_format_context = nullptr;
 return error;
 }

 /** Get information on the input file (number of streams etc.). */
-if ( (error = avformat_find_stream_info(input_format_context, NULL)) < 0 ) {
+if ( (error = avformat_find_stream_info(input_format_context, nullptr)) < 0 ) {
 Error(
 "Could not open find stream info (error '%s')",
 av_make_error_string(error).c_str()
@@ -84,7 +84,7 @@ int FFmpeg_Input::Open(const char *filepath) {

 streams[i].frame_count = 0;
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
-streams[i].context = avcodec_alloc_context3(NULL);
+streams[i].context = avcodec_alloc_context3(nullptr);
 avcodec_parameters_to_context(streams[i].context, input_format_context->streams[i]->codecpar);
 #else
 streams[i].context = input_format_context->streams[i]->codec;
@@ -98,7 +98,7 @@ int FFmpeg_Input::Open(const char *filepath) {
 Debug(1, "Using codec (%s) for stream %d", streams[i].codec->name, i);
 }

-error = avcodec_open2(streams[i].context, streams[i].codec, NULL);
+error = avcodec_open2(streams[i].context, streams[i].codec, nullptr);
 if ( error < 0 ) {
 Error("Could not open input codec (error '%s')",
 av_make_error_string(error).c_str());
@@ -106,7 +106,7 @@ int FFmpeg_Input::Open(const char *filepath) {
 avcodec_free_context(&streams[i].context);
 #endif
 avformat_close_input(&input_format_context);
-input_format_context = NULL;
+input_format_context = nullptr;
 return error;
 }
 } // end foreach stream
@@ -134,11 +134,11 @@ AVFrame *FFmpeg_Input::get_frame(int stream_id) {
 (ret == -110)
 ) {
 Info("av_read_frame returned %s.", av_make_error_string(ret).c_str());
-return NULL;
+return nullptr;
 }
 Error("Unable to read packet from stream %d: error %d \"%s\".",
 packet.stream_index, ret, av_make_error_string(ret).c_str());
-return NULL;
+return nullptr;
 }
 dumpPacket(input_format_context->streams[packet.stream_index], &packet, "Received packet");

@@ -193,7 +193,7 @@ AVFrame *FFmpeg_Input::get_frame(int stream_id, double at) {
 ret = av_seek_frame(input_format_context, stream_id, seek_target, AVSEEK_FLAG_FRAME);
 if ( ret < 0 ) {
 Error("Unable to seek in stream");
-return NULL;
+return nullptr;
 }
 // Have to grab a frame to update our current frame to know where we are
 get_frame(stream_id);
@@ -201,7 +201,7 @@ AVFrame *FFmpeg_Input::get_frame(int stream_id, double at) {

 if ( !frame ) {
 Warning("Unable to get frame.");
-return NULL;
+return nullptr;
 }

 if (
@@ -217,7 +217,7 @@ AVFrame *FFmpeg_Input::get_frame(int stream_id, double at) {
 AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME
 ) < 0 ) ) {
 Error("Unable to seek in stream");
-return NULL;
+return nullptr;
 }
 // Have to grab a frame to update our current frame to know where we are
 get_frame(stream_id);

@@ -30,13 +30,13 @@
 #include "zm_fifo.h"
 #define RAW_BUFFER 512
 static bool zm_fifodbg_inited = false;
-FILE *zm_fifodbg_log_fd = 0;
+FILE *zm_fifodbg_log_fd = nullptr;
 char zm_fifodbg_log[PATH_MAX] = "";

 static bool zmFifoDbgOpen() {
 if ( zm_fifodbg_log_fd )
 fclose(zm_fifodbg_log_fd);
-zm_fifodbg_log_fd = NULL;
+zm_fifodbg_log_fd = nullptr;
 signal(SIGPIPE, SIG_IGN);
 FifoStream::fifo_create_if_missing(zm_fifodbg_log);
 int fd = open(zm_fifodbg_log, O_WRONLY|O_NONBLOCK|O_TRUNC);
@@ -48,7 +48,7 @@ static bool zmFifoDbgOpen() {
 return false;
 }
 zm_fifodbg_log_fd = fdopen(fd, "wb");
-if ( zm_fifodbg_log_fd == NULL ) {
+if ( zm_fifodbg_log_fd == nullptr ) {
 close(fd);
 return false;
 }
@@ -95,7 +95,7 @@ void zmFifoDbgOutput(
 int res = fwrite(dbg_string, dbg_ptr-dbg_string, 1, zm_fifodbg_log_fd);
 if ( res != 1 ) {
 fclose(zm_fifodbg_log_fd);
-zm_fifodbg_log_fd = NULL;
+zm_fifodbg_log_fd = nullptr;
 } else {
 fflush(zm_fifodbg_log_fd);
 }
@@ -249,7 +249,7 @@ void FifoStream::runStream() {
 }

 while ( !zm_terminate ) {
-gettimeofday(&now, NULL);
+gettimeofday(&now, nullptr);
 checkCommandQueue();
 if ( stream_type == MJPEG ) {
 if ( !sendMJEGFrames() )

@@ -88,7 +88,7 @@ int FileCamera::PreCapture() {
 // This waits until 1 second has passed since it was modified. Effectively limiting fps to 60.
 // Which is kinda bogus. If we were writing to this jpg constantly faster than we are monitoring it here
 // we would never break out of this loop
-while ( (time(0) - statbuf.st_mtime) < 1 ) {
+while ( (time(nullptr) - statbuf.st_mtime) < 1 ) {
 usleep(100000);
 }
 return 0;

@@ -48,12 +48,12 @@ static short *g_v_table;
 static short *g_u_table;
 static short *b_u_table;

-struct SwsContext *sws_convert_context = NULL;
+struct SwsContext *sws_convert_context = nullptr;

-jpeg_compress_struct *Image::writejpg_ccinfo[101] = { 0 };
-jpeg_compress_struct *Image::encodejpg_ccinfo[101] = { 0 };
-jpeg_decompress_struct *Image::readjpg_dcinfo = 0;
-jpeg_decompress_struct *Image::decodejpg_dcinfo = 0;
+jpeg_compress_struct *Image::writejpg_ccinfo[101] = { };
+jpeg_compress_struct *Image::encodejpg_ccinfo[101] = { };
+jpeg_decompress_struct *Image::readjpg_dcinfo = nullptr;
+jpeg_decompress_struct *Image::decodejpg_dcinfo = nullptr;
 struct zm_error_mgr Image::jpg_err;

 /* Pointer to blend function. */
@@ -152,7 +152,7 @@ Image::Image(int p_width, int p_height, int p_colours, int p_subpixelorder, uint
 padding = p_padding;
 subpixelorder = p_subpixelorder;
 size = linesize*height + padding;
-buffer = 0;
+buffer = nullptr;
 holdbuffer = 0;
 if ( p_buffer ) {
 allocation = size;
@@ -177,7 +177,7 @@ Image::Image(int p_width, int p_linesize, int p_height, int p_colours, int p_sub
 padding = p_padding;
 subpixelorder = p_subpixelorder;
 size = linesize*height + padding;
-buffer = 0;
+buffer = nullptr;
 holdbuffer = 0;
 if ( p_buffer ) {
 allocation = size;
@@ -210,7 +210,7 @@ Image::Image(const AVFrame *frame) {
 size = avpicture_get_size(AV_PIX_FMT_RGBA, width, height);
 #endif

-buffer = 0;
+buffer = nullptr;
 holdbuffer = 0;
 AllocImgBuffer(size);

@@ -229,9 +229,9 @@ Image::Image(const AVFrame *frame) {
 height,
 (AVPixelFormat)frame->format,
 width, height,
-AV_PIX_FMT_RGBA, SWS_BICUBIC, NULL,
-NULL, NULL);
-if ( sws_convert_context == NULL )
+AV_PIX_FMT_RGBA, SWS_BICUBIC, nullptr,
+nullptr, nullptr);
+if ( sws_convert_context == nullptr )
 Fatal("Unable to create conversion context");

 if ( sws_scale(sws_convert_context, frame->data, frame->linesize, 0, frame->height,
@@ -254,7 +254,7 @@ Image::Image(const Image &p_image) {
 colours = p_image.colours;
 subpixelorder = p_image.subpixelorder;
 size = p_image.size; // allocation is set in AllocImgBuffer
-buffer = 0;
+buffer = nullptr;
 holdbuffer = 0;
 AllocImgBuffer(size);
 (*fptr_imgbufcpy)(buffer, p_image.buffer, size);
@@ -273,24 +273,24 @@ void Image::Deinitialise() {
 if ( readjpg_dcinfo ) {
 jpeg_destroy_decompress(readjpg_dcinfo);
 delete readjpg_dcinfo;
-readjpg_dcinfo = NULL;
+readjpg_dcinfo = nullptr;
 }
 if ( decodejpg_dcinfo ) {
 jpeg_destroy_decompress(decodejpg_dcinfo);
 delete decodejpg_dcinfo;
-decodejpg_dcinfo = NULL;
+decodejpg_dcinfo = nullptr;
 }
 for ( unsigned int quality=0; quality <= 100; quality += 1 ) {
 if ( writejpg_ccinfo[quality] ) {
 jpeg_destroy_compress(writejpg_ccinfo[quality]);
 delete writejpg_ccinfo[quality];
-writejpg_ccinfo[quality] = NULL;
+writejpg_ccinfo[quality] = nullptr;
 }
 } // end foreach quality

 if ( sws_convert_context ) {
 sws_freeContext(sws_convert_context);
-sws_convert_context = NULL;
+sws_convert_context = nullptr;
 }
 } // end void Image::Deinitialise()

@@ -501,25 +501,25 @@ uint8_t* Image::WriteBuffer(
 &&
 p_colours != ZM_COLOUR_RGB32 ) {
 Error("WriteBuffer called with unexpected colours: %d", p_colours);
-return NULL;
+return nullptr;
 }

 if ( ! ( p_height > 0 && p_width > 0 ) ) {
 Error("WriteBuffer called with invalid width or height: %d %d", p_width, p_height);
-return NULL;
+return nullptr;
 }

 if ( p_width != width || p_height != height || p_colours != colours || p_subpixelorder != subpixelorder ) {

 unsigned int newsize = (p_width * p_height) * p_colours;

-if ( buffer == NULL ) {
+if ( buffer == nullptr ) {
 AllocImgBuffer(newsize);
 } else {
 if ( allocation < newsize ) {
 if ( holdbuffer ) {
 Error("Held buffer is undersized for requested buffer");
-return NULL;
+return nullptr;
 } else {
 /* Replace buffer with a bigger one */
 //DumpImgBuffer(); // Done in AllocImgBuffer too
@@ -552,7 +552,7 @@ void Image::AssignDirect(
 const size_t buffer_size,
 const int p_buffertype) {

-if ( new_buffer == NULL ) {
+if ( new_buffer == nullptr ) {
 Error("Attempt to directly assign buffer from a NULL pointer");
 return;
 }
@@ -623,7 +623,7 @@ void Image::Assign(
 const size_t buffer_size) {
 unsigned int new_size = (p_width * p_height) * p_colours;

-if ( new_buffer == NULL ) {
+if ( new_buffer == nullptr ) {
 Error("Attempt to assign buffer from a NULL pointer");
 return;
 }
@@ -672,7 +672,7 @@ void Image::Assign(
 void Image::Assign(const Image &image) {
 unsigned int new_size = image.height * image.linesize;

-if ( image.buffer == NULL ) {
+if ( image.buffer == nullptr ) {
 Error("Attempt to assign image with an empty buffer");
 return;
 }
@@ -811,7 +811,7 @@ Image *Image::HighlightEdges(

 bool Image::ReadRaw(const char *filename) {
 FILE *infile;
-if ( (infile = fopen(filename, "rb")) == NULL ) {
+if ( (infile = fopen(filename, "rb")) == nullptr ) {
 Error("Can't open %s: %s", filename, strerror(errno));
 return false;
 }
@@ -842,7 +842,7 @@ bool Image::ReadRaw(const char *filename) {

 bool Image::WriteRaw(const char *filename) const {
 FILE *outfile;
-if ( (outfile = fopen(filename, "wb")) == NULL ) {
+if ( (outfile = fopen(filename, "wb")) == nullptr ) {
 Error("Can't open %s: %s", filename, strerror(errno));
 return false;
 }
@@ -871,7 +871,7 @@ bool Image::ReadJpeg(const char *filename, unsigned int p_colours, unsigned int
 }

 FILE *infile;
-if ( (infile = fopen(filename, "rb")) == NULL ) {
+if ( (infile = fopen(filename, "rb")) == nullptr ) {
 Error("Can't open %s: %s", filename, strerror(errno));
 return false;
 }
@@ -895,7 +895,7 @@ bool Image::ReadJpeg(const char *filename, unsigned int p_colours, unsigned int

 /* Check if the image has at least one huffman table defined. If not, use the standard ones */
 /* This is required for the MJPEG capture palette of USB devices */
-if ( cinfo->dc_huff_tbl_ptrs[0] == NULL ) {
+if ( cinfo->dc_huff_tbl_ptrs[0] == nullptr ) {
 zm_use_std_huff_tables(cinfo);
 }

@@ -960,7 +960,7 @@ cinfo->out_color_space = JCS_RGB;
 break;
 } // end switch p_colours

-if ( WriteBuffer(new_width, new_height, new_colours, new_subpixelorder) == NULL ) {
+if ( WriteBuffer(new_width, new_height, new_colours, new_subpixelorder) == nullptr ) {
 Error("Failed requesting writeable buffer for reading JPEG image.");
 jpeg_abort_decompress(cinfo);
 fclose(infile);
@@ -1011,7 +1011,7 @@ bool Image::WriteJpeg(const char *filename, int quality_override, struct timeval
 int quality = quality_override ? quality_override : config.jpeg_file_quality;

 struct jpeg_compress_struct *cinfo = writejpg_ccinfo[quality];
-FILE *outfile = NULL;
+FILE *outfile = nullptr;
 static int raw_fd = 0;
 raw_fd = 0;

@@ -1028,7 +1028,7 @@ bool Image::WriteJpeg(const char *filename, int quality_override, struct timeval
 jpg_err.pub.emit_message = zm_jpeg_emit_silence;
 if ( setjmp(jpg_err.setjmp_buffer) ) {
 jpeg_abort_compress(cinfo);
-Debug(1, "Aborted a write mid-stream and %s and %d", (outfile == NULL) ? "closing file" : "file not opened", raw_fd);
+Debug(1, "Aborted a write mid-stream and %s and %d", (outfile == nullptr) ? "closing file" : "file not opened", raw_fd);
 if ( raw_fd )
 close(raw_fd);
 if ( outfile )
@@ -1038,7 +1038,7 @@ bool Image::WriteJpeg(const char *filename, int quality_override, struct timeval
 }

 if ( !on_blocking_abort ) {
-if ( (outfile = fopen(filename, "wb")) == NULL ) {
+if ( (outfile = fopen(filename, "wb")) == nullptr ) {
 Error("Can't open %s for writing: %s", filename, strerror(errno));
 return false;
 }
@@ -1047,7 +1047,7 @@ bool Image::WriteJpeg(const char *filename, int quality_override, struct timeval
 if ( raw_fd < 0 )
 return false;
 outfile = fdopen(raw_fd, "wb");
-if ( outfile == NULL ) {
+if ( outfile == nullptr ) {
 close(raw_fd);
 return false;
 }
@@ -1190,7 +1190,7 @@ bool Image::DecodeJpeg(

 /* Check if the image has at least one huffman table defined. If not, use the standard ones */
 /* This is required for the MJPEG capture palette of USB devices */
-if ( cinfo->dc_huff_tbl_ptrs[0] == NULL ) {
+if ( cinfo->dc_huff_tbl_ptrs[0] == nullptr ) {
 zm_use_std_huff_tables(cinfo);
 }

@@ -1256,7 +1256,7 @@ cinfo->out_color_space = JCS_RGB;
 break;
 } // end switch

-if ( WriteBuffer(new_width, new_height, new_colours, new_subpixelorder) == NULL ) {
+if ( WriteBuffer(new_width, new_height, new_colours, new_subpixelorder) == nullptr ) {
 Error("Failed requesting writeable buffer for reading JPEG image.");
 jpeg_abort_decompress(cinfo);
 return false;
@@ -1799,7 +1799,7 @@ void Image::Delta( const Image &image, Image* targetimage) const {

 uint8_t *pdiff = targetimage->WriteBuffer(width, height, ZM_COLOUR_GRAY8, ZM_SUBPIX_ORDER_NONE);

-if ( pdiff == NULL ) {
+if ( pdiff == nullptr ) {
 Panic("Failed requesting writeable buffer for storing the delta image");
 }

@@ -57,7 +57,7 @@ extern imgbufcpy_fptr_t fptr_imgbufcpy;
 /* Should be called from Image class functions */
 inline static uint8_t* AllocBuffer(size_t p_bufsize) {
 uint8_t* buffer = (uint8_t*)zm_mallocaligned(64, p_bufsize);
-if ( buffer == NULL )
+if ( buffer == nullptr )
 Fatal("Memory allocation failed: %s", strerror(errno));

 return buffer;
@@ -122,7 +122,7 @@ protected:

 inline void DumpImgBuffer() {
 DumpBuffer(buffer, buffertype);
-buffer = NULL;
+buffer = nullptr;
 allocation = 0;
 }

@ -192,7 +192,7 @@ void zm_jpeg_mem_dest (j_compress_ptr cinfo, JOCTET *outbuffer, int *outbuffer_s
|
|||
* manager serially with the same JPEG object, because their private object
|
||||
* sizes may be different. Caveat programmer.
|
||||
*/
|
||||
if ( cinfo->dest == NULL )
|
||||
if ( cinfo->dest == nullptr )
|
||||
{
|
||||
/* first time for this JPEG object? */
|
||||
cinfo->dest = (struct jpeg_destination_mgr *)(*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_PERMANENT, SIZEOF(mem_destination_mgr));
|
||||
|
@ -369,7 +369,7 @@ void zm_jpeg_mem_src( j_decompress_ptr cinfo, const JOCTET *inbuffer, int inbuff
|
|||
* This makes it unsafe to use this manager and a different source
|
||||
* manager serially with the same JPEG object. Caveat programmer.
|
||||
*/
|
||||
if ( cinfo->src == NULL )
|
||||
if ( cinfo->src == nullptr )
|
||||
{
|
||||
/* first time for this JPEG object? */
|
||||
cinfo->src = (struct jpeg_source_mgr *)(*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_PERMANENT, SIZEOF(mem_source_mgr));
|
||||
|
@ -396,7 +396,7 @@ void zm_jpeg_mem_src( j_decompress_ptr cinfo, const JOCTET *inbuffer, int inbuff
|
|||
src->inbuffer = (JOCTET *)inbuffer;
|
||||
src->inbuffer_size = inbuffer_size;
|
||||
src->pub.bytes_in_buffer = 0; /* forces fill_input_buffer on first read */
|
||||
src->pub.next_input_byte = NULL; /* until buffer loaded */
|
||||
src->pub.next_input_byte = nullptr; /* until buffer loaded */
|
||||
}
|
||||
|
||||
void zm_use_std_huff_tables( j_decompress_ptr cinfo ) {
|
||||
|
|
|
@ -72,7 +72,7 @@ void* LibvlcLockBuffer(void* opaque, void** planes) {
|
|||
data->prevBuffer = buffer;
|
||||
|
||||
*planes = data->buffer;
|
||||
return NULL;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
void LibvlcUnlockBuffer(void* opaque, void* picture, void *const *planes) {
|
||||
|
@ -129,12 +129,12 @@ LibvlcCamera::LibvlcCamera(
|
|||
mMethod(p_method),
|
||||
mOptions(p_options)
|
||||
{
|
||||
mLibvlcInstance = NULL;
|
||||
mLibvlcMedia = NULL;
|
||||
mLibvlcMediaPlayer = NULL;
|
||||
mLibvlcData.buffer = NULL;
|
||||
mLibvlcData.prevBuffer = NULL;
|
||||
mOptArgV = NULL;
|
||||
mLibvlcInstance = nullptr;
|
||||
mLibvlcMedia = nullptr;
|
||||
mLibvlcMediaPlayer = nullptr;
|
||||
mLibvlcData.buffer = nullptr;
|
||||
mLibvlcData.prevBuffer = nullptr;
|
||||
mOptArgV = nullptr;
|
||||
|
||||
/* Has to be located inside the constructor so other components such as zma will receive correct colours and subpixel order */
|
||||
if ( colours == ZM_COLOUR_RGB32 ) {
|
||||
|
@ -163,19 +163,19 @@ LibvlcCamera::~LibvlcCamera() {
|
|||
if ( capture ) {
|
||||
Terminate();
|
||||
}
|
||||
if ( mLibvlcMediaPlayer != NULL ) {
|
||||
if ( mLibvlcMediaPlayer != nullptr ) {
|
||||
(*libvlc_media_player_release_f)(mLibvlcMediaPlayer);
|
||||
mLibvlcMediaPlayer = NULL;
|
||||
mLibvlcMediaPlayer = nullptr;
|
||||
}
|
||||
if ( mLibvlcMedia != NULL ) {
|
||||
if ( mLibvlcMedia != nullptr ) {
|
||||
(*libvlc_media_release_f)(mLibvlcMedia);
|
||||
mLibvlcMedia = NULL;
|
||||
mLibvlcMedia = nullptr;
|
||||
}
|
||||
if ( mLibvlcInstance != NULL ) {
|
||||
if ( mLibvlcInstance != nullptr ) {
|
||||
(*libvlc_release_f)(mLibvlcInstance);
|
||||
mLibvlcInstance = NULL;
|
||||
mLibvlcInstance = nullptr;
|
||||
}
|
||||
if ( mOptArgV != NULL ) {
|
||||
if ( mOptArgV != nullptr ) {
|
||||
delete[] mOptArgV;
|
||||
}
|
||||
}
|
||||
|
@ -188,12 +188,12 @@ void LibvlcCamera::Terminate() {
|
|||
(*libvlc_media_player_stop_f)(mLibvlcMediaPlayer);
|
||||
if ( mLibvlcData.buffer ) {
|
||||
zm_freealigned(mLibvlcData.buffer);
|
||||
mLibvlcData.buffer = NULL;
|
||||
mLibvlcData.buffer = nullptr;
|
||||
}
|
||||
|
||||
if ( mLibvlcData.prevBuffer ) {
|
||||
zm_freealigned(mLibvlcData.prevBuffer);
|
||||
mLibvlcData.prevBuffer = NULL;
|
||||
mLibvlcData.prevBuffer = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -223,27 +223,27 @@ int LibvlcCamera::PrimeCapture() {
|
|||
}
|
||||
|
||||
mLibvlcInstance = (*libvlc_new_f)(opVect.size(), (const char* const*)mOptArgV);
|
||||
if ( mLibvlcInstance == NULL ) {
|
||||
if ( mLibvlcInstance == nullptr ) {
|
||||
Error("Unable to create libvlc instance due to: %s", (*libvlc_errmsg_f)());
|
||||
return -1;
|
||||
}
|
||||
(*libvlc_log_set_f)(mLibvlcInstance, LibvlcCamera::log_callback, NULL);
|
||||
(*libvlc_log_set_f)(mLibvlcInstance, LibvlcCamera::log_callback, nullptr);
|
||||
|
||||
|
||||
mLibvlcMedia = (*libvlc_media_new_location_f)(mLibvlcInstance, mPath.c_str());
|
||||
if ( mLibvlcMedia == NULL ) {
|
||||
if ( mLibvlcMedia == nullptr ) {
|
||||
Error("Unable to open input %s due to: %s", mPath.c_str(), (*libvlc_errmsg_f)());
|
||||
return -1;
|
||||
}
|
||||
|
||||
mLibvlcMediaPlayer = (*libvlc_media_player_new_from_media_f)(mLibvlcMedia);
|
||||
if ( mLibvlcMediaPlayer == NULL ) {
|
||||
if ( mLibvlcMediaPlayer == nullptr ) {
|
||||
Error("Unable to create player for %s due to: %s", mPath.c_str(), (*libvlc_errmsg_f)());
|
||||
return -1;
|
||||
}
|
||||
|
||||
(*libvlc_video_set_format_f)(mLibvlcMediaPlayer, mTargetChroma.c_str(), width, height, width * mBpp);
|
||||
(*libvlc_video_set_callbacks_f)(mLibvlcMediaPlayer, &LibvlcLockBuffer, &LibvlcUnlockBuffer, NULL, &mLibvlcData);
|
||||
(*libvlc_video_set_callbacks_f)(mLibvlcMediaPlayer, &LibvlcLockBuffer, &LibvlcUnlockBuffer, nullptr, &mLibvlcData);
|
||||
|
||||
mLibvlcData.bufferSize = width * height * mBpp;
|
||||
// Libvlc wants 32 byte alignment for images (should in theory do this for all image lines)
|
||||
|
|
|
@ -50,7 +50,7 @@ static rfbCredential* GetCredentialsCallback(rfbClient* cl, int credentialType){
|
|||
rfbCredential *c = (rfbCredential *)malloc(sizeof(rfbCredential));
|
||||
if ( credentialType != rfbCredentialTypeUser ) {
|
||||
free(c);
|
||||
return NULL;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
c->userCredential.password = strdup((const char *)(*rfbClientGetClientData_f)(cl, &TAG_1));
|
||||
|
|
|
@ -302,10 +302,10 @@ LocalCamera::V4L1Data LocalCamera::v4l1_data;
|
|||
#endif // ZM_HAS_V4L1
|
||||
|
||||
#if HAVE_LIBSWSCALE
|
||||
AVFrame **LocalCamera::capturePictures = 0;
|
||||
AVFrame **LocalCamera::capturePictures = nullptr;
|
||||
#endif // HAVE_LIBSWSCALE
|
||||
|
||||
LocalCamera *LocalCamera::last_camera = NULL;
|
||||
LocalCamera *LocalCamera::last_camera = nullptr;
|
||||
|
||||
LocalCamera::LocalCamera(
|
||||
int p_id,
|
||||
|
@ -673,14 +673,14 @@ LocalCamera::LocalCamera(
|
|||
Fatal("Image size mismatch. Required: %d Available: %u", pSize, imagesize);
|
||||
}
|
||||
|
||||
imgConversionContext = sws_getContext(width, height, capturePixFormat, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL);
|
||||
imgConversionContext = sws_getContext(width, height, capturePixFormat, width, height, imagePixFormat, SWS_BICUBIC, nullptr, nullptr, nullptr);
|
||||
|
||||
if ( !imgConversionContext ) {
|
||||
Fatal("Unable to initialise image scaling context");
|
||||
}
|
||||
} else {
|
||||
tmpPicture = NULL;
|
||||
imgConversionContext = NULL;
|
||||
tmpPicture = nullptr;
|
||||
imgConversionContext = nullptr;
|
||||
}
|
||||
#endif
|
||||
} // end LocalCamera::LocalCamera
|
||||
|
@ -693,7 +693,7 @@ LocalCamera::~LocalCamera() {
|
|||
/* Clean up swscale stuff */
|
||||
if ( capture && conversion_type == 1 ) {
|
||||
sws_freeContext(imgConversionContext);
|
||||
imgConversionContext = NULL;
|
||||
imgConversionContext = nullptr;
|
||||
|
||||
av_frame_free(&tmpPicture);
|
||||
}
|
||||
|
@ -898,7 +898,7 @@ void LocalCamera::Initialise() {
|
|||
Fatal("Unable to query video buffer: %s", strerror(errno));
|
||||
|
||||
v4l2_data.buffers[i].length = vid_buf.length;
|
||||
v4l2_data.buffers[i].start = mmap(NULL, vid_buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, vid_fd, vid_buf.m.offset);
|
||||
v4l2_data.buffers[i].start = mmap(nullptr, vid_buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, vid_fd, vid_buf.m.offset);
|
||||
|
||||
if ( v4l2_data.buffers[i].start == MAP_FAILED )
|
||||
Fatal("Can't map video buffer %u (%u bytes) to memory: %s(%d)",
|
||||
|
@ -1991,7 +1991,7 @@ int LocalCamera::PrimeCapture() {
|
|||
if ( vidioctl(vid_fd, VIDIOC_QBUF, &vid_buf) < 0 )
|
||||
Fatal("Failed to queue buffer %d: %s", frame, strerror(errno));
|
||||
}
|
||||
v4l2_data.bufptr = NULL;
|
||||
v4l2_data.bufptr = nullptr;
|
||||
|
||||
Debug(3, "Starting video stream");
|
||||
//enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
|
||||
|
@ -2023,7 +2023,7 @@ int LocalCamera::PreCapture() {
|
|||
|
||||
int LocalCamera::Capture(Image &image) {
|
||||
Debug(3, "Capturing");
|
||||
static uint8_t* buffer = NULL;
|
||||
static uint8_t* buffer = nullptr;
|
||||
int buffer_bytesused = 0;
|
||||
int capture_frame = -1;
|
||||
|
||||
|
@ -2116,7 +2116,7 @@ int LocalCamera::Capture(Image &image) {
|
|||
|
||||
/* Request a writeable buffer of the target image */
|
||||
uint8_t* directbuffer = image.WriteBuffer(width, height, colours, subpixelorder);
|
||||
if ( directbuffer == NULL ) {
|
||||
if ( directbuffer == nullptr ) {
|
||||
Error("Failed requesting writeable buffer for the captured image.");
|
||||
return -1;
|
||||
}
|
||||
|
|
|
@ -38,7 +38,7 @@
|
|||
#endif
|
||||
|
||||
bool Logger::smInitialised = false;
|
||||
Logger *Logger::smInstance = NULL;
|
||||
Logger *Logger::smInstance = nullptr;
|
||||
|
||||
Logger::StringMap Logger::smCodes;
|
||||
Logger::IntMap Logger::smSyslogPriorities;
|
||||
|
@ -74,7 +74,7 @@ Logger::Logger() :
|
|||
mDbConnected(false),
|
||||
mLogPath(staticConfig.PATH_LOGS.c_str()),
|
||||
//mLogFile( mLogPath+"/"+mId+".log" ),
|
||||
mLogFileFP(NULL),
|
||||
mLogFileFP(nullptr),
|
||||
mHasTerminal(false),
|
||||
mFlush(false) {
|
||||
|
||||
|
@ -413,7 +413,7 @@ void Logger::logFile(const std::string &logFile) {
|
|||
|
||||
void Logger::openFile() {
|
||||
if ( mLogFile.size() ) {
|
||||
if ( (mLogFileFP = fopen(mLogFile.c_str(), "a")) == (FILE *)NULL ) {
|
||||
if ( (mLogFileFP = fopen(mLogFile.c_str(), "a")) == nullptr ) {
|
||||
mFileLevel = NOLOG;
|
||||
Error("fopen() for %s, error = %s", mLogFile.c_str(), strerror(errno));
|
||||
}
|
||||
|
@ -426,10 +426,10 @@ void Logger::closeFile() {
|
|||
if ( mLogFileFP ) {
|
||||
fflush(mLogFileFP);
|
||||
if ( fclose(mLogFileFP) < 0 ) {
|
||||
mLogFileFP = (FILE *)NULL;
|
||||
mLogFileFP = nullptr;
|
||||
Error("fclose(), error = %s", strerror(errno));
|
||||
}
|
||||
mLogFileFP = (FILE *)NULL;
|
||||
mLogFileFP = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -464,7 +464,7 @@ void Logger::logPrint(bool hex, const char * const filepath, const int line, con
|
|||
if ( level < PANIC || level > DEBUG9 )
|
||||
Panic("Invalid logger level %d", level);
|
||||
|
||||
gettimeofday(&timeVal, NULL);
|
||||
gettimeofday(&timeVal, nullptr);
|
||||
|
||||
#if 0
|
||||
if ( logRuntime ) {
|
||||
|
@ -604,7 +604,7 @@ void Logger::logPrint(bool hex, const char * const filepath, const int line, con
|
|||
void logInit(const char *name, const Logger::Options &options) {
|
||||
if ( Logger::smInstance ) {
|
||||
delete Logger::smInstance;
|
||||
Logger::smInstance = NULL;
|
||||
Logger::smInstance = nullptr;
|
||||
}
|
||||
|
||||
Logger::smInstance = new Logger();
|
||||
|
@ -614,6 +614,6 @@ void logInit(const char *name, const Logger::Options &options) {
|
|||
void logTerm() {
|
||||
if ( Logger::smInstance ) {
|
||||
delete Logger::smInstance;
|
||||
Logger::smInstance = NULL;
|
||||
Logger::smInstance = nullptr;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,15 +27,15 @@ inline void* zm_mallocaligned(unsigned int reqalignment, size_t reqsize) {
|
|||
uint8_t* retptr;
|
||||
#if HAVE_POSIX_MEMALIGN
|
||||
if ( posix_memalign((void**)&retptr,reqalignment,reqsize) != 0 )
|
||||
return NULL;
|
||||
return nullptr;
|
||||
|
||||
return retptr;
|
||||
#else
|
||||
uint8_t* alloc;
|
||||
retptr = (uint8_t*)malloc(reqsize+reqalignment+sizeof(void*));
|
||||
|
||||
if ( retptr == NULL )
|
||||
return NULL;
|
||||
if ( retptr == nullptr )
|
||||
return nullptr;
|
||||
|
||||
alloc = retptr + sizeof(void*);
|
||||
|
||||
|
@ -60,7 +60,7 @@ inline void zm_freealigned(void* ptr) {
|
|||
|
||||
inline char *mempbrk(const char *s, const char *accept, size_t limit) {
|
||||
if ( limit == 0 || !s || !accept || !*accept )
|
||||
return 0;
|
||||
return nullptr;
|
||||
|
||||
unsigned int i,j;
|
||||
size_t acc_len = strlen(accept);
|
||||
|
@ -72,12 +72,12 @@ inline char *mempbrk(const char *s, const char *accept, size_t limit) {
|
|||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
inline char *memstr(const char *s, const char *n, size_t limit) {
|
||||
if ( limit == 0 || !s || !n )
|
||||
return 0;
|
||||
return nullptr;
|
||||
|
||||
if ( !*n )
|
||||
return (char *)s;
|
||||
|
@ -97,7 +97,7 @@ inline char *memstr(const char *s, const char *n, size_t limit) {
|
|||
break;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
inline size_t memspn(const char *s, const char *accept, size_t limit) {
|
||||
|
|
|
@ -108,9 +108,9 @@ std::vector<std::string> split(const std::string &s, char delim) {
|
|||
|
||||
Monitor::MonitorLink::MonitorLink(unsigned int p_id, const char *p_name) :
|
||||
id(p_id),
|
||||
shared_data(NULL),
|
||||
trigger_data(NULL),
|
||||
video_store_data(NULL)
|
||||
shared_data(nullptr),
|
||||
trigger_data(nullptr),
|
||||
video_store_data(nullptr)
|
||||
{
|
||||
strncpy(name, p_name, sizeof(name)-1);
|
||||
|
||||
|
@ -121,7 +121,7 @@ Monitor::MonitorLink::MonitorLink(unsigned int p_id, const char *p_name) :
|
|||
shm_id = 0;
|
||||
#endif // ZM_MEM_MAPPED
|
||||
mem_size = 0;
|
||||
mem_ptr = 0;
|
||||
mem_ptr = nullptr;
|
||||
|
||||
last_event = 0;
|
||||
last_state = IDLE;
|
||||
|
@ -135,8 +135,8 @@ Monitor::MonitorLink::~MonitorLink() {
|
|||
}
|
||||
|
||||
bool Monitor::MonitorLink::connect() {
|
||||
if ( !last_connect_time || (time(0) - last_connect_time) > 60 ) {
|
||||
last_connect_time = time(0);
|
||||
if ( !last_connect_time || (time(nullptr) - last_connect_time) > 60 ) {
|
||||
last_connect_time = time(nullptr);
|
||||
|
||||
mem_size = sizeof(SharedData) + sizeof(TriggerData);
|
||||
|
||||
|
@ -172,7 +172,7 @@ bool Monitor::MonitorLink::connect() {
|
|||
return false;
|
||||
}
|
||||
|
||||
mem_ptr = (unsigned char *)mmap(NULL, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED, map_fd, 0);
|
||||
mem_ptr = (unsigned char *)mmap(nullptr, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED, map_fd, 0);
|
||||
if ( mem_ptr == MAP_FAILED ) {
|
||||
Error("Can't map file %s (%d bytes) to memory: %s", mem_file, mem_size, strerror(errno));
|
||||
disconnect();
|
||||
|
@ -247,7 +247,7 @@ bool Monitor::MonitorLink::disconnect() {
|
|||
|
||||
#endif // ZM_MEM_MAPPED
|
||||
mem_size = 0;
|
||||
mem_ptr = 0;
|
||||
mem_ptr = nullptr;
|
||||
}
|
||||
return( true );
|
||||
}
|
||||
|
@ -368,10 +368,10 @@ Monitor::Monitor(
|
|||
camera( p_camera ),
|
||||
n_zones( p_n_zones ),
|
||||
zones( p_zones ),
|
||||
timestamps( 0 ),
|
||||
images( 0 ),
|
||||
privacy_bitmask( NULL ),
|
||||
event_delete_thread(NULL)
|
||||
timestamps( nullptr ),
|
||||
images( nullptr ),
|
||||
privacy_bitmask( nullptr ),
|
||||
event_delete_thread(nullptr)
|
||||
{
|
||||
if (analysis_fps > 0.0) {
|
||||
uint64_t usec = round(1000000*pre_event_count/analysis_fps);
|
||||
|
@ -440,7 +440,7 @@ Monitor::Monitor(
|
|||
(image_buffer_count*sizeof(struct timeval)),
|
||||
image_buffer_count, camera->ImageSize(), (image_buffer_count*camera->ImageSize()),
|
||||
mem_size);
|
||||
mem_ptr = NULL;
|
||||
mem_ptr = nullptr;
|
||||
|
||||
storage = new Storage(storage_id);
|
||||
Debug(1, "Storage path: %s", storage->Path());
|
||||
|
@ -516,7 +516,7 @@ Monitor::Monitor(
|
|||
videoRecording = ((GetOptVideoWriter() == H264PASSTHROUGH) && camera->SupportsNativeVideo());
|
||||
|
||||
n_linked_monitors = 0;
|
||||
linked_monitors = 0;
|
||||
linked_monitors = nullptr;
|
||||
|
||||
if ( purpose == ANALYSIS ) {
|
||||
while(
|
||||
|
@ -588,12 +588,12 @@ bool Monitor::connect() {
|
|||
|
||||
Debug(3, "MMap file size is %ld", map_stat.st_size);
|
||||
#ifdef MAP_LOCKED
|
||||
mem_ptr = (unsigned char *)mmap(NULL, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED|MAP_LOCKED, map_fd, 0);
|
||||
mem_ptr = (unsigned char *)mmap(nullptr, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED|MAP_LOCKED, map_fd, 0);
|
||||
if ( mem_ptr == MAP_FAILED ) {
|
||||
if ( errno == EAGAIN ) {
|
||||
Debug(1, "Unable to map file %s (%d bytes) to locked memory, trying unlocked", mem_file, mem_size);
|
||||
#endif
|
||||
mem_ptr = (unsigned char *)mmap(NULL, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED, map_fd, 0);
|
||||
mem_ptr = (unsigned char *)mmap(nullptr, mem_size, PROT_READ|PROT_WRITE, MAP_SHARED, map_fd, 0);
|
||||
Debug(1, "Mapped file %s (%d bytes) to unlocked memory", mem_file, mem_size);
|
||||
#ifdef MAP_LOCKED
|
||||
} else {
|
||||
|
@ -603,7 +603,7 @@ bool Monitor::connect() {
|
|||
#endif
|
||||
if ( mem_ptr == MAP_FAILED )
|
||||
Fatal("Can't map file %s (%d bytes) to memory: %s(%d)", mem_file, mem_size, strerror(errno), errno);
|
||||
if ( mem_ptr == NULL ) {
|
||||
if ( mem_ptr == nullptr ) {
|
||||
Error("mmap gave a NULL address:");
|
||||
} else {
|
||||
Debug(3, "mmapped to %p", mem_ptr);
|
||||
|
@ -670,19 +670,19 @@ Monitor::~Monitor() {
|
|||
delete linked_monitors[i];
|
||||
}
|
||||
delete[] linked_monitors;
|
||||
linked_monitors = 0;
|
||||
linked_monitors = nullptr;
|
||||
}
|
||||
if ( timestamps ) {
|
||||
delete[] timestamps;
|
||||
timestamps = 0;
|
||||
timestamps = nullptr;
|
||||
}
|
||||
if ( images ) {
|
||||
delete[] images;
|
||||
images = 0;
|
||||
images = nullptr;
|
||||
}
|
||||
if ( privacy_bitmask ) {
|
||||
delete[] privacy_bitmask;
|
||||
privacy_bitmask = NULL;
|
||||
privacy_bitmask = nullptr;
|
||||
}
|
||||
if ( mem_ptr ) {
|
||||
if ( event ) {
|
||||
|
@ -693,7 +693,7 @@ Monitor::~Monitor() {
|
|||
if ( event_delete_thread ) {
|
||||
event_delete_thread->join();
|
||||
delete event_delete_thread;
|
||||
event_delete_thread = NULL;
|
||||
event_delete_thread = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -776,9 +776,9 @@ void Monitor::AddZones( int p_n_zones, Zone *p_zones[] ) {
|
|||
void Monitor::AddPrivacyBitmask( Zone *p_zones[] ) {
|
||||
if ( privacy_bitmask ) {
|
||||
delete[] privacy_bitmask;
|
||||
privacy_bitmask = NULL;
|
||||
privacy_bitmask = nullptr;
|
||||
}
|
||||
Image *privacy_image = NULL;
|
||||
Image *privacy_image = nullptr;
|
||||
|
||||
for ( int i = 0; i < n_zones; i++ ) {
|
||||
if ( p_zones[i]->IsPrivacy() ) {
|
||||
|
@ -1139,7 +1139,7 @@ void Monitor::DumpZoneImage(const char *zone_string) {
|
|||
}
|
||||
}
|
||||
|
||||
Image *zone_image = NULL;
|
||||
Image *zone_image = nullptr;
|
||||
if ( ( (!staticConfig.SERVER_ID) || ( staticConfig.SERVER_ID == server_id ) ) && mem_ptr ) {
|
||||
Debug(3, "Trying to load from local zmc");
|
||||
int index = shared_data->last_write_index;
|
||||
|
@ -1158,7 +1158,7 @@ void Monitor::DumpZoneImage(const char *zone_string) {
|
|||
stream->setStreamStart(event_id, (unsigned int)1);
|
||||
zone_image = stream->getImage();
|
||||
delete stream;
|
||||
stream = NULL;
|
||||
stream = nullptr;
|
||||
} else {
|
||||
Error("Unable to load an event for monitor %d", id);
|
||||
return;
|
||||
|
@ -1294,7 +1294,7 @@ bool Monitor::Analyse() {
|
|||
}
|
||||
|
||||
struct timeval now;
|
||||
gettimeofday(&now, NULL);
|
||||
gettimeofday(&now, nullptr);
|
||||
|
||||
if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) {
|
||||
if ( now.tv_sec != last_fps_time ) {
|
||||
|
@ -1700,9 +1700,9 @@ bool Monitor::Analyse() {
|
|||
} // end foreach zone
|
||||
|
||||
if ( state == PREALARM ) {
|
||||
Event::AddPreAlarmFrame(snap_image, *timestamp, score, (got_anal_image?&alarm_image:NULL));
|
||||
Event::AddPreAlarmFrame(snap_image, *timestamp, score, (got_anal_image?&alarm_image:nullptr));
|
||||
} else {
|
||||
event->AddFrame(snap_image, *timestamp, score, (got_anal_image?&alarm_image:NULL));
|
||||
event->AddFrame(snap_image, *timestamp, score, (got_anal_image?&alarm_image:nullptr));
|
||||
}
|
||||
} else {
|
||||
// Not doing alarm frame storage
|
||||
|
@ -1848,8 +1848,8 @@ void Monitor::Reload() {
|
|||
min_section_length = atoi(dbrow[index++]);
|
||||
frame_skip = atoi(dbrow[index++]);
|
||||
motion_frame_skip = atoi(dbrow[index++]);
|
||||
analysis_fps = dbrow[index] ? strtod(dbrow[index], NULL) : 0; index++;
|
||||
analysis_update_delay = strtoul(dbrow[index++], NULL, 0);
|
||||
analysis_fps = dbrow[index] ? strtod(dbrow[index], nullptr) : 0; index++;
|
||||
analysis_update_delay = strtoul(dbrow[index++], nullptr, 0);
|
||||
|
||||
capture_max_fps = dbrow[index] ? atof(dbrow[index]) : 0.0; index++;
|
||||
capture_delay = ( capture_max_fps > 0.0 ) ? int(DT_PREC_3/capture_max_fps) : 0;
|
||||
|
@ -1882,7 +1882,7 @@ void Monitor::ReloadZones() {
|
|||
delete zones[i];
|
||||
}
|
||||
delete[] zones;
|
||||
zones = 0;
|
||||
zones = nullptr;
|
||||
n_zones = Zone::Load(this, zones);
|
||||
//DumpZoneImage();
|
||||
} // end void Monitor::ReloadZones()
|
||||
|
@ -1894,7 +1894,7 @@ void Monitor::ReloadLinkedMonitors(const char *p_linked_monitors) {
|
|||
delete linked_monitors[i];
|
||||
}
|
||||
delete[] linked_monitors;
|
||||
linked_monitors = 0;
|
||||
linked_monitors = nullptr;
|
||||
}
|
||||
|
||||
n_linked_monitors = 0;
|
||||
|
@ -2082,8 +2082,8 @@ Monitor *Monitor::Load(MYSQL_ROW dbrow, bool load_zones, Purpose purpose) {
|
|||
int enabled = dbrow[col] ? atoi(dbrow[col]) : 0; col++;
|
||||
const char *linked_monitors = dbrow[col];col++;
|
||||
|
||||
double analysis_fps = dbrow[col] ? strtod(dbrow[col], NULL) : 0; col++;
|
||||
unsigned int analysis_update_delay = strtoul(dbrow[col++], NULL, 0);
|
||||
double analysis_fps = dbrow[col] ? strtod(dbrow[col], nullptr) : 0; col++;
|
||||
unsigned int analysis_update_delay = strtoul(dbrow[col++], nullptr, 0);
|
||||
|
||||
double capture_max_fps = dbrow[col] ? atof(dbrow[col]) : 0.0; col++;
|
||||
double capture_delay = ( capture_max_fps > 0.0 ) ? int(DT_PREC_3/capture_max_fps) : 0;
|
||||
|
@ -2161,9 +2161,9 @@ Monitor *Monitor::Load(MYSQL_ROW dbrow, bool load_zones, Purpose purpose) {
|
|||
int track_motion = atoi(dbrow[col]); col++;
|
||||
bool embed_exif = (*dbrow[col] != '0'); col++;
|
||||
int signal_check_points = dbrow[col] ? atoi(dbrow[col]) : 0;col++;
|
||||
int signal_check_color = strtol(dbrow[col][0] == '#' ? dbrow[col]+1 : dbrow[col], 0, 16); col++;
|
||||
int signal_check_color = strtol(dbrow[col][0] == '#' ? dbrow[col]+1 : dbrow[col], nullptr, 16); col++;
|
||||
|
||||
Camera *camera = 0;
|
||||
Camera *camera = nullptr;
|
||||
if ( type == "Local" ) {
|
||||
|
||||
#if ZM_HAS_V4L
|
||||
|
@ -2412,7 +2412,7 @@ Monitor *Monitor::Load(unsigned int p_id, bool load_zones, Purpose purpose) {
|
|||
zmDbRow dbrow;
|
||||
if ( ! dbrow.fetch(sql.c_str()) ) {
|
||||
Error("Can't use query result: %s", mysql_error(&dbconn));
|
||||
return NULL;
|
||||
return nullptr;
|
||||
}
|
||||
Monitor *monitor = Monitor::Load(dbrow.mysql_row(), load_zones, purpose);
|
||||
|
||||
|
@ -2521,7 +2521,7 @@ int Monitor::Capture() {
|
|||
if ( (index == shared_data->last_read_index) && (function > MONITOR) ) {
|
||||
Warning("Buffer overrun at index %d, image %d, slow down capture, speed up analysis or increase ring buffer size",
|
||||
index, image_count );
|
||||
time_t now = time(0);
|
||||
time_t now = time(nullptr);
|
||||
double approxFps = double(image_buffer_count)/double(now-image_buffer[index].timestamp->tv_sec);
|
||||
time_t last_read_delta = now - shared_data->last_read_time;
|
||||
if ( last_read_delta > (image_buffer_count/approxFps) ) {
|
||||
|
@ -2534,7 +2534,7 @@ int Monitor::Capture() {
|
|||
capture_image->MaskPrivacy(privacy_bitmask);
|
||||
|
||||
// Might be able to remove this call, when we start passing around ZMPackets, which will already have a timestamp
|
||||
gettimeofday(image_buffer[index].timestamp, NULL);
|
||||
gettimeofday(image_buffer[index].timestamp, nullptr);
|
||||
if ( config.timestamp_on_capture ) {
|
||||
TimestampImage(capture_image, image_buffer[index].timestamp);
|
||||
}
|
||||
|
@ -2642,23 +2642,23 @@ bool Monitor::closeEvent() {
|
|||
|
||||
if ( function == RECORD || function == MOCORD ) {
|
||||
//FIXME Is this neccessary? ENdTime should be set in the destructor
|
||||
gettimeofday(&(event->EndTime()), NULL);
|
||||
gettimeofday(&(event->EndTime()), nullptr);
|
||||
}
|
||||
if ( event_delete_thread ) {
|
||||
event_delete_thread->join();
|
||||
delete event_delete_thread;
|
||||
event_delete_thread = NULL;
|
||||
event_delete_thread = nullptr;
|
||||
}
|
||||
#if 0
|
||||
event_delete_thread = new std::thread([](Event *event) {
|
||||
Event * e = event;
|
||||
event = NULL;
|
||||
event = nullptr;
|
||||
delete e;
|
||||
e = NULL;
|
||||
e = nullptr;
|
||||
}, event);
|
||||
#else
|
||||
delete event;
|
||||
event = NULL;
|
||||
event = nullptr;
|
||||
#endif
|
||||
video_store_data->recording = (struct timeval){0};
|
||||
return true;
|
||||
|
|
|
@ -315,7 +315,7 @@ bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp) {
|
|||
int img_buffer_size = 0;
|
||||
static unsigned char img_buffer[ZM_MAX_IMAGE_SIZE];
|
||||
|
||||
FILE *fdj = NULL;
|
||||
FILE *fdj = nullptr;
|
||||
if ( (fdj = fopen(filepath, "r")) ) {
|
||||
img_buffer_size = fread(img_buffer, 1, sizeof(img_buffer), fdj);
|
||||
fclose(fdj);
|
||||
|
@ -326,7 +326,7 @@ bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp) {
|
|||
|
||||
// Calculate how long it takes to actually send the frame
|
||||
struct timeval frameStartTime;
|
||||
gettimeofday(&frameStartTime, NULL);
|
||||
gettimeofday(&frameStartTime, nullptr);
|
||||
|
||||
if (
|
||||
(0 > fprintf(stdout, "Content-Length: %d\r\nX-Timestamp: %d.%06d\r\n\r\n",
|
||||
|
@ -342,7 +342,7 @@ bool MonitorStream::sendFrame(const char *filepath, struct timeval *timestamp) {
|
|||
fflush(stdout);
|
||||
|
||||
struct timeval frameEndTime;
|
||||
gettimeofday(&frameEndTime, NULL);
|
||||
gettimeofday(&frameEndTime, nullptr);
|
||||
|
||||
int frameSendTime = tvDiffMsec(frameStartTime, frameEndTime);
|
||||
if ( frameSendTime > 1000/maxfps ) {
|
||||
|
@ -386,7 +386,7 @@ bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
|
|||
|
||||
// Calculate how long it takes to actually send the frame
|
||||
struct timeval frameStartTime;
|
||||
gettimeofday(&frameStartTime, NULL);
|
||||
gettimeofday(&frameStartTime, nullptr);
|
||||
|
||||
fputs("--ZoneMinderFrame\r\n", stdout);
|
||||
switch ( type ) {
|
||||
|
@ -430,7 +430,7 @@ bool MonitorStream::sendFrame(Image *image, struct timeval *timestamp) {
|
|||
fflush(stdout);
|
||||
|
||||
struct timeval frameEndTime;
|
||||
gettimeofday(&frameEndTime, NULL);
|
||||
gettimeofday(&frameEndTime, nullptr);
|
||||
|
||||
int frameSendTime = tvDiffMsec(frameStartTime, frameEndTime);
|
||||
if ( frameSendTime > 1000/maxfps ) {
|
||||
|
@ -470,7 +470,7 @@ void MonitorStream::runStream() {
|
|||
|
||||
frame_count = 0;
|
||||
|
||||
temp_image_buffer = 0;
|
||||
temp_image_buffer = nullptr;
|
||||
temp_image_buffer_count = playback_buffer;
|
||||
temp_read_index = temp_image_buffer_count;
|
||||
temp_write_index = temp_image_buffer_count;
|
||||
|
@ -479,7 +479,7 @@ void MonitorStream::runStream() {
|
|||
bool buffered_playback = false;
|
||||
|
||||
// Last image and timestamp when paused, will be resent occasionally to prevent timeout
|
||||
Image *paused_image = NULL;
|
||||
Image *paused_image = nullptr;
|
||||
struct timeval paused_timestamp;
|
||||
|
||||
if ( connkey && ( playback_buffer > 0 ) ) {
|
||||
|
@ -487,8 +487,8 @@ void MonitorStream::runStream() {
|
|||
const int max_swap_len_suffix = 15;
|
||||
|
||||
int swap_path_length = staticConfig.PATH_SWAP.length() + 1; // +1 for NULL terminator
|
||||
int subfolder1_length = snprintf(NULL, 0, "/zmswap-m%d", monitor->Id()) + 1;
|
||||
int subfolder2_length = snprintf(NULL, 0, "/zmswap-q%06d", connkey) + 1;
|
||||
int subfolder1_length = snprintf(nullptr, 0, "/zmswap-m%d", monitor->Id()) + 1;
|
||||
int subfolder2_length = snprintf(nullptr, 0, "/zmswap-q%06d", connkey) + 1;
|
||||
int total_swap_path_length = swap_path_length + subfolder1_length + subfolder2_length;
|
||||
|
||||
if ( total_swap_path_length + max_swap_len_suffix > PATH_MAX ) {
|
||||
|
@ -545,7 +545,7 @@ void MonitorStream::runStream() {
|
|||
break;
|
||||
}
|
||||
|
||||
gettimeofday(&now, NULL);
|
||||
gettimeofday(&now, nullptr);
|
||||
|
||||
bool was_paused = paused;
|
||||
if ( connkey ) {
|
||||
|
@ -571,7 +571,7 @@ void MonitorStream::runStream() {
|
|||
} else if ( paused_image ) {
|
||||
Debug(1, "Clearing paused_image");
|
||||
delete paused_image;
|
||||
paused_image = NULL;
|
||||
paused_image = nullptr;
|
||||
}
|
||||
|
||||
if ( buffered_playback && delayed ) {
|
||||
|
|
|
@ -59,7 +59,7 @@ class MonitorStream : public StreamBase {
|
|||
|
||||
public:
|
||||
MonitorStream() :
|
||||
temp_image_buffer(NULL), temp_image_buffer_count(0), temp_read_index(0), temp_write_index(0),
|
||||
temp_image_buffer(nullptr), temp_image_buffer_count(0), temp_read_index(0), temp_write_index(0),
|
||||
ttl(0), playback_buffer(0), delayed(false), frame_count(0) {
|
||||
}
|
||||
void setStreamBuffer(int p_playback_buffer) {
|
||||
|
|
|
@ -46,9 +46,9 @@ void VideoStream::Initialise( ) {
|
|||
|
||||
void VideoStream::SetupFormat( ) {
|
||||
/* allocate the output media context */
|
||||
ofc = NULL;
|
||||
ofc = nullptr;
|
||||
#if (LIBAVFORMAT_VERSION_CHECK(53, 2, 0, 2, 0) && (LIBAVFORMAT_VERSION_MICRO >= 100))
|
||||
avformat_alloc_output_context2(&ofc, NULL, format, filename);
|
||||
avformat_alloc_output_context2(&ofc, nullptr, format, filename);
|
||||
#else
|
||||
AVFormatContext *s = avformat_alloc_context();
|
||||
if ( !s ) {
|
||||
|
@ -59,18 +59,18 @@ void VideoStream::SetupFormat( ) {
|
|||
AVOutputFormat *oformat;
|
||||
if ( format ) {
|
||||
#if LIBAVFORMAT_VERSION_CHECK(52, 45, 0, 45, 0)
|
||||
oformat = av_guess_format(format, NULL, NULL);
|
||||
oformat = av_guess_format(format, nullptr, nullptr);
|
||||
#else
|
||||
oformat = guess_format(format, NULL, NULL);
|
||||
oformat = guess_format(format, nullptr, nullptr);
|
||||
#endif
|
||||
if ( !oformat ) {
|
||||
Fatal("Requested output format '%s' is not a suitable output format", format);
|
||||
}
|
||||
} else {
|
||||
#if LIBAVFORMAT_VERSION_CHECK(52, 45, 0, 45, 0)
|
||||
oformat = av_guess_format(NULL, filename, NULL);
|
||||
oformat = av_guess_format(nullptr, filename, nullptr);
|
||||
#else
|
||||
oformat = guess_format(NULL, filename, NULL);
|
||||
oformat = guess_format(nullptr, filename, nullptr);
|
||||
#endif
|
||||
if ( !oformat ) {
|
||||
Fatal("Unable to find a suitable output format for '%s'", format);
|
||||
|
@ -91,7 +91,7 @@ void VideoStream::SetupFormat( ) {
|
|||
#endif
|
||||
} else {
|
||||
Debug(1, "No allocating priv_data");
|
||||
s->priv_data = NULL;
|
||||
s->priv_data = nullptr;
|
||||
}
|
||||
|
||||
if ( filename ) {
|
||||
|
@ -172,7 +172,7 @@ void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int hei
|
|||
|
||||
/* add the video streams using the default format codecs
|
||||
and initialize the codecs */
|
||||
ost = NULL;
|
||||
ost = nullptr;
|
||||
if ( codec_id != AV_CODEC_ID_NONE ) {
|
||||
codec = avcodec_find_encoder(codec_id);
|
||||
if ( !codec ) {
|
||||
|
@ -204,7 +204,7 @@ void VideoStream::SetupCodec( int colours, int subpixelorder, int width, int hei
|
|||
|
||||
#if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
|
||||
|
||||
codec_context = avcodec_alloc_context3(NULL);
|
||||
codec_context = avcodec_alloc_context3(nullptr);
|
||||
//avcodec_parameters_to_context(codec_context, ost->codecpar);
|
||||
#else
|
||||
codec_context = ost->codec;
|
||||
|
@ -296,7 +296,7 @@ bool VideoStream::OpenStream( ) {
|
|||
#if !LIBAVFORMAT_VERSION_CHECK(53, 8, 0, 8, 0)
|
||||
if ( (ret = avcodec_open(codec_context, codec)) < 0 )
|
||||
#else
|
||||
if ( (ret = avcodec_open2(codec_context, codec, 0)) < 0 )
|
||||
if ( (ret = avcodec_open2(codec_context, codec, nullptr)) < 0 )
|
||||
#endif
|
||||
{
|
||||
Error("Could not open codec. Error code %d \"%s\"", ret, av_err2str(ret));
|
||||
|
@ -338,7 +338,7 @@ bool VideoStream::OpenStream( ) {
|
|||
/* if the output format is not identical to the input format, then a temporary
|
||||
picture is needed too. It is then converted to the required
|
||||
output format */
|
||||
tmp_opicture = NULL;
|
||||
tmp_opicture = nullptr;
|
||||
if ( codec_context->pix_fmt != pf ) {
|
||||
#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101)
|
||||
tmp_opicture = av_frame_alloc( );
|
||||
|
@ -374,7 +374,7 @@ bool VideoStream::OpenStream( ) {
|
|||
/* open the output file, if needed */
|
||||
if ( !(of->flags & AVFMT_NOFILE) ) {
|
||||
#if LIBAVFORMAT_VERSION_CHECK(53, 15, 0, 21, 0)
|
||||
ret = avio_open2( &ofc->pb, filename, AVIO_FLAG_WRITE, NULL, NULL );
|
||||
ret = avio_open2( &ofc->pb, filename, AVIO_FLAG_WRITE, nullptr, nullptr );
|
||||
#elif LIBAVFORMAT_VERSION_CHECK(52, 102, 0, 102, 0)
|
||||
ret = avio_open( &ofc->pb, filename, AVIO_FLAG_WRITE );
|
||||
#else
|
||||
|
@ -391,7 +391,7 @@ bool VideoStream::OpenStream( ) {
|
|||
return false;
|
||||
}
|
||||
|
||||
video_outbuf = NULL;
|
||||
video_outbuf = nullptr;
|
||||
#if LIBAVFORMAT_VERSION_CHECK(57, 0, 0, 0, 0)
|
||||
if (codec_context->codec_type == AVMEDIA_TYPE_VIDEO &&
|
||||
codec_context->codec_id == AV_CODEC_ID_RAWVIDEO) {
|
||||
|
@ -403,7 +403,7 @@ bool VideoStream::OpenStream( ) {
|
|||
// TODO: Make buffer dynamic.
|
||||
video_outbuf_size = 4000000;
|
||||
video_outbuf = (uint8_t *)malloc( video_outbuf_size );
|
||||
if ( video_outbuf == NULL ) {
|
||||
if ( video_outbuf == nullptr ) {
|
||||
Fatal("Unable to malloc memory for outbuf");
|
||||
}
|
||||
}
|
||||
|
@ -417,7 +417,7 @@ bool VideoStream::OpenStream( ) {
|
|||
#if !LIBAVFORMAT_VERSION_CHECK(53, 2, 0, 4, 0)
|
||||
ret = av_write_header(ofc);
|
||||
#else
|
||||
ret = avformat_write_header(ofc, NULL);
|
||||
ret = avformat_write_header(ofc, nullptr);
|
||||
#endif
|
||||
|
||||
if ( ret < 0 ) {
|
||||
|
@ -430,16 +430,16 @@ bool VideoStream::OpenStream( ) {
|
|||
VideoStream::VideoStream( const char *in_filename, const char *in_format, int bitrate, double frame_rate, int colours, int subpixelorder, int width, int height ) :
|
||||
filename(in_filename),
|
||||
format(in_format),
|
||||
opicture(NULL),
|
||||
tmp_opicture(NULL),
|
||||
video_outbuf(NULL),
|
||||
opicture(nullptr),
|
||||
tmp_opicture(nullptr),
|
||||
video_outbuf(nullptr),
|
||||
video_outbuf_size(0),
|
||||
last_pts( -1 ),
|
||||
streaming_thread(0),
|
||||
do_streaming(true),
|
||||
add_timestamp(false),
|
||||
timestamp(0),
|
||||
buffer_copy(NULL),
|
||||
buffer_copy(nullptr),
|
||||
buffer_copy_lock(new pthread_mutex_t),
|
||||
buffer_copy_size(0),
|
||||
buffer_copy_used(0),
|
||||
|
@ -454,15 +454,15 @@ VideoStream::VideoStream( const char *in_filename, const char *in_format, int bi
|
|||
codec_and_format = new char[length+1];;
|
||||
strcpy( codec_and_format, format );
|
||||
format = codec_and_format;
|
||||
codec_name = NULL;
|
||||
codec_name = nullptr;
|
||||
char *f = strchr(codec_and_format, '/');
|
||||
if (f != NULL) {
|
||||
if (f != nullptr) {
|
||||
*f = 0;
|
||||
codec_name = f+1;
|
||||
}
|
||||
}
|
||||
|
||||
codec_context = NULL;
|
||||
codec_context = nullptr;
|
||||
SetupFormat( );
|
||||
SetupCodec( colours, subpixelorder, width, height, bitrate, frame_rate );
|
||||
SetParameters( );
|
||||
|
@ -474,7 +474,7 @@ VideoStream::VideoStream( const char *in_filename, const char *in_format, int bi
|
|||
packet_index = 0;
|
||||
|
||||
// Initialize mutex used by streaming thread.
|
||||
if ( pthread_mutex_init( buffer_copy_lock, NULL ) != 0 ) {
|
||||
if ( pthread_mutex_init( buffer_copy_lock, nullptr ) != 0 ) {
|
||||
Fatal("pthread_mutex_init failed");
|
||||
}
|
||||
|
||||
|
@ -494,7 +494,7 @@ VideoStream::~VideoStream( ) {
|
|||
pthread_join(streaming_thread, &thread_exit_code);
|
||||
}
|
||||
|
||||
if ( buffer_copy != NULL ) {
|
||||
if ( buffer_copy != nullptr ) {
|
||||
av_free( buffer_copy );
|
||||
}
|
||||
|
||||
|
@ -582,7 +582,7 @@ double VideoStream::EncodeFrame( const uint8_t *buffer, int buffer_size, bool _a
|
|||
Debug( 1, "Starting streaming thread" );
|
||||
|
||||
// Start a thread for streaming encoded video.
|
||||
if (pthread_create( &streaming_thread, NULL, StreamingThreadCallback, (void*) this) != 0){
|
||||
if (pthread_create( &streaming_thread, nullptr, StreamingThreadCallback, (void*) this) != 0){
|
||||
// Log a fatal error and exit the process.
|
||||
Fatal( "VideoStream failed to create streaming thread." );
|
||||
}
|
||||
|
@ -597,12 +597,12 @@ double VideoStream::ActuallyEncodeFrame( const uint8_t *buffer, int buffer_size,
|
|||
|
||||
if ( codec_context->pix_fmt != pf ) {
|
||||
#ifdef HAVE_LIBSWSCALE
|
||||
static struct SwsContext *img_convert_ctx = 0;
|
||||
static struct SwsContext *img_convert_ctx = nullptr;
|
||||
#endif // HAVE_LIBSWSCALE
|
||||
memcpy( tmp_opicture->data[0], buffer, buffer_size );
|
||||
#ifdef HAVE_LIBSWSCALE
|
||||
if ( !img_convert_ctx ) {
|
||||
img_convert_ctx = sws_getCachedContext( NULL, codec_context->width, codec_context->height, pf, codec_context->width, codec_context->height, codec_context->pix_fmt, SWS_BICUBIC, NULL, NULL, NULL );
|
||||
img_convert_ctx = sws_getCachedContext( nullptr, codec_context->width, codec_context->height, pf, codec_context->width, codec_context->height, codec_context->pix_fmt, SWS_BICUBIC, nullptr, nullptr, nullptr );
|
||||
if ( !img_convert_ctx )
|
||||
Panic( "Unable to initialise image scaling context" );
|
||||
}
|
||||
|
@ -659,7 +659,7 @@ double VideoStream::ActuallyEncodeFrame( const uint8_t *buffer, int buffer_size,
|
|||
#else
|
||||
int out_size = avcodec_encode_video( codec_context, video_outbuf, video_outbuf_size, opicture_ptr );
|
||||
got_packet = out_size > 0 ? 1 : 0;
|
||||
pkt->data = got_packet ? video_outbuf : NULL;
|
||||
pkt->data = got_packet ? video_outbuf : nullptr;
|
||||
pkt->size = got_packet ? out_size : 0;
|
||||
#endif
|
||||
#endif
|
||||
|
@ -705,7 +705,7 @@ void *VideoStream::StreamingThreadCallback(void *ctx){
|
|||
|
||||
Debug( 1, "StreamingThreadCallback started" );
|
||||
|
||||
if (ctx == NULL) return NULL;
|
||||
if (ctx == nullptr) return nullptr;
|
||||
|
||||
VideoStream* videoStream = reinterpret_cast<VideoStream*>(ctx);
|
||||
|
||||
|
@ -765,7 +765,7 @@ void *VideoStream::StreamingThreadCallback(void *ctx){
|
|||
frame_count++;
|
||||
}
|
||||
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
#endif // HAVE_LIBAVCODEC
|
||||
|
|
|
@ -25,18 +25,18 @@
|
|||
using namespace std;
|
||||
|
||||
ZMPacket::ZMPacket( AVPacket *p ) {
|
||||
frame = NULL;
|
||||
image = NULL;
|
||||
frame = nullptr;
|
||||
image = nullptr;
|
||||
av_init_packet( &packet );
|
||||
if ( zm_av_packet_ref( &packet, p ) < 0 ) {
|
||||
Error("error refing packet");
|
||||
}
|
||||
gettimeofday( ×tamp, NULL );
|
||||
gettimeofday( ×tamp, nullptr );
|
||||
}
|
||||
|
||||
ZMPacket::ZMPacket( AVPacket *p, struct timeval *t ) {
|
||||
frame = NULL;
|
||||
image = NULL;
|
||||
frame = nullptr;
|
||||
image = nullptr;
|
||||
av_init_packet( &packet );
|
||||
if ( zm_av_packet_ref( &packet, p ) < 0 ) {
|
||||
Error("error refing packet");
|
||||
|
|
|
@ -32,7 +32,7 @@ zm_packetqueue::zm_packetqueue( int p_max_stream_id ) {
|
|||
zm_packetqueue::~zm_packetqueue() {
|
||||
clearQueue();
|
||||
delete[] packet_counts;
|
||||
packet_counts = NULL;
|
||||
packet_counts = nullptr;
|
||||
}
|
||||
|
||||
bool zm_packetqueue::queuePacket(ZMPacket* zm_packet) {
|
||||
|
@ -116,7 +116,7 @@ bool zm_packetqueue::queuePacket(AVPacket* av_packet) {
|
|||
|
||||
ZMPacket* zm_packetqueue::popPacket( ) {
|
||||
if ( pktQueue.empty() ) {
|
||||
return NULL;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
ZMPacket *packet = pktQueue.front();
|
||||
|
@ -137,7 +137,7 @@ unsigned int zm_packetqueue::clearQueue(unsigned int frames_to_keep, int stream_
|
|||
}
|
||||
|
||||
std::list<ZMPacket *>::reverse_iterator it;
|
||||
ZMPacket *packet = NULL;
|
||||
ZMPacket *packet = nullptr;
|
||||
|
||||
for ( it = pktQueue.rbegin(); it != pktQueue.rend() && frames_to_keep; ++it ) {
|
||||
ZMPacket *zm_packet = *it;
|
||||
|
@ -186,13 +186,13 @@ unsigned int zm_packetqueue::clearQueue(unsigned int frames_to_keep, int stream_
|
|||
|
||||
delete_count += 1;
|
||||
}
|
||||
packet = NULL; // tidy up for valgrind
|
||||
packet = nullptr; // tidy up for valgrind
|
||||
Debug(3, "Deleted %d packets, %d remaining", delete_count, pktQueue.size());
|
||||
return delete_count;
|
||||
} // end unsigned int zm_packetqueue::clearQueue( unsigned int frames_to_keep, int stream_id )
|
||||
|
||||
void zm_packetqueue::clearQueue() {
|
||||
ZMPacket *packet = NULL;
|
||||
ZMPacket *packet = nullptr;
|
||||
int delete_count = 0;
|
||||
while ( !pktQueue.empty() ) {
|
||||
packet = pktQueue.front();
|
||||
|
@ -256,7 +256,7 @@ unsigned int zm_packetqueue::clearQueue(struct timeval *duration, int streamId)
|
|||
}
|
||||
|
||||
unsigned int deleted_frames = 0;
|
||||
ZMPacket *zm_packet = NULL;
|
||||
ZMPacket *zm_packet = nullptr;
|
||||
while (distance(it, pktQueue.rend()) > 1) {
|
||||
zm_packet = pktQueue.front();
|
||||
pktQueue.pop_front();
|
||||
|
@ -264,7 +264,7 @@ unsigned int zm_packetqueue::clearQueue(struct timeval *duration, int streamId)
|
|||
delete zm_packet;
|
||||
deleted_frames += 1;
|
||||
}
|
||||
zm_packet = NULL;
|
||||
zm_packet = nullptr;
|
||||
Debug(3, "Deleted %d frames", deleted_frames);
|
||||
|
||||
return deleted_frames;
|
||||
|
@ -366,7 +366,7 @@ void zm_packetqueue::clear_unwanted_packets(
|
|||
pktQueue.size() );
|
||||
|
||||
unsigned int deleted_frames = 0;
|
||||
ZMPacket *packet = NULL;
|
||||
ZMPacket *packet = nullptr;
|
||||
while ( distance(it, pktQueue.rend()) > 1 ) {
|
||||
//while ( pktQueue.rend() != it ) {
|
||||
packet = pktQueue.front();
|
||||
|
@ -375,7 +375,7 @@ void zm_packetqueue::clear_unwanted_packets(
|
|||
delete packet;
|
||||
deleted_frames += 1;
|
||||
}
|
||||
packet = NULL; // tidy up for valgrind
|
||||
packet = nullptr; // tidy up for valgrind
|
||||
|
||||
zm_packet = pktQueue.front();
|
||||
av_packet = &(zm_packet->packet);
|
||||
|
|
|
@ -61,7 +61,7 @@ protected:
|
|||
min_x = 0;
|
||||
max_x = 0;
|
||||
n_edges = 0;
|
||||
edges = 0;
|
||||
edges = nullptr;
|
||||
}
|
||||
~Slice() {
|
||||
delete edges;
|
||||
|
@ -83,7 +83,7 @@ protected:
|
|||
void calcCentre();
|
||||
|
||||
public:
|
||||
inline Polygon() : n_coords(0), coords(0), area(0), edges(0), slices(0) {
|
||||
inline Polygon() : n_coords(0), coords(nullptr), area(0), edges(nullptr), slices(nullptr) {
|
||||
}
|
||||
Polygon(int p_n_coords, const Coord *p_coords);
|
||||
Polygon(const Polygon &p_polygon);
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
|
||||
#if HAVE_LIBPCRE
|
||||
|
||||
RegExpr::RegExpr( const char *pattern, int flags, int p_max_matches ) : max_matches( p_max_matches ), match_buffers( 0 ), match_lengths( 0 ), match_valid( 0 )
|
||||
RegExpr::RegExpr( const char *pattern, int flags, int p_max_matches ) : max_matches( p_max_matches ), match_buffers( nullptr ), match_lengths( nullptr ), match_valid( nullptr )
|
||||
{
|
||||
const char *errstr;
|
||||
int erroffset = 0;
|
||||
|
@ -50,7 +50,7 @@ RegExpr::RegExpr( const char *pattern, int flags, int p_max_matches ) : max_matc
|
|||
match_valid = new bool[max_matches];
|
||||
memset( match_valid, 0, sizeof(*match_valid)*max_matches );
|
||||
} else {
|
||||
match_vectors = NULL;
|
||||
match_vectors = nullptr;
|
||||
}
|
||||
match_string = "";
|
||||
n_matches = 0;
|
||||
|
|
|
@ -44,20 +44,20 @@ RemoteCamera::RemoteCamera(
|
|||
path( p_path ),
|
||||
hp( 0 ),
|
||||
mNeedAuth(false),
|
||||
mAuthenticator(NULL)
|
||||
mAuthenticator(nullptr)
|
||||
{
|
||||
if ( path[0] != '/' )
|
||||
path = '/'+path;
|
||||
}
|
||||
|
||||
RemoteCamera::~RemoteCamera() {
|
||||
if ( hp != NULL ) {
|
||||
if ( hp != nullptr ) {
|
||||
freeaddrinfo(hp);
|
||||
hp = NULL;
|
||||
hp = nullptr;
|
||||
}
|
||||
if ( mAuthenticator ) {
|
||||
delete mAuthenticator;
|
||||
mAuthenticator = NULL;
|
||||
mAuthenticator = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,9 +99,9 @@ void RemoteCamera::Initialise() {
|
|||
if ( ret != 0 ) {
|
||||
Fatal( "Can't getaddrinfo(%s port %s): %s", host.c_str(), port.c_str(), gai_strerror(ret) );
|
||||
}
|
||||
struct addrinfo *p = NULL;
|
||||
struct addrinfo *p = nullptr;
|
||||
int addr_count = 0;
|
||||
for ( p = hp; p != NULL; p = p->ai_next ) {
|
||||
for ( p = hp; p != nullptr; p = p->ai_next ) {
|
||||
addr_count++;
|
||||
}
|
||||
Debug(1, "%d addresses returned", addr_count);
|
||||
|
|
|
@ -36,11 +36,11 @@
|
|||
#endif
|
||||
|
||||
#if HAVE_LIBPCRE
|
||||
static RegExpr *header_expr = 0;
|
||||
static RegExpr *status_expr = 0;
|
||||
static RegExpr *connection_expr = 0;
|
||||
static RegExpr *content_length_expr = 0;
|
||||
static RegExpr *content_type_expr = 0;
|
||||
static RegExpr *header_expr = nullptr;
|
||||
static RegExpr *status_expr = nullptr;
|
||||
static RegExpr *connection_expr = nullptr;
|
||||
static RegExpr *content_length_expr = nullptr;
|
||||
static RegExpr *content_type_expr = nullptr;
|
||||
#endif
|
||||
|
||||
RemoteCameraHttp::RemoteCameraHttp(
|
||||
|
@ -140,9 +140,9 @@ void RemoteCameraHttp::Initialise() {
|
|||
} // end void RemoteCameraHttp::Initialise()
|
||||
|
||||
int RemoteCameraHttp::Connect() {
|
||||
struct addrinfo *p = NULL;
|
||||
struct addrinfo *p = nullptr;
|
||||
|
||||
for ( p = hp; p != NULL; p = p->ai_next ) {
|
||||
for ( p = hp; p != nullptr; p = p->ai_next ) {
|
||||
sd = socket( p->ai_family, p->ai_socktype, p->ai_protocol );
|
||||
if ( sd < 0 ) {
|
||||
Warning("Can't create socket: %s", strerror(errno) );
|
||||
|
@ -165,7 +165,7 @@ int RemoteCameraHttp::Connect() {
|
|||
break;
|
||||
}
|
||||
|
||||
if ( p == NULL ) {
|
||||
if ( p == nullptr ) {
|
||||
Error("Unable to connect to the remote camera, aborting");
|
||||
return -1;
|
||||
}
|
||||
|
@ -207,7 +207,7 @@ int RemoteCameraHttp::ReadData( Buffer &buffer, unsigned int bytes_expected ) {
|
|||
|
||||
struct timeval temp_timeout = timeout;
|
||||
|
||||
int n_found = select(sd+1, &rfds, NULL, NULL, &temp_timeout);
|
||||
int n_found = select(sd+1, &rfds, nullptr, nullptr, &temp_timeout);
|
||||
if( n_found == 0 ) {
|
||||
Debug( 1, "Select timed out timeout was %d secs %d usecs", temp_timeout.tv_sec, temp_timeout.tv_usec );
|
||||
int error = 0;
|
||||
|
@ -293,10 +293,10 @@ int RemoteCameraHttp::ReadData( Buffer &buffer, unsigned int bytes_expected ) {
|
|||
}
|
||||
|
||||
int RemoteCameraHttp::GetData() {
|
||||
time_t start_time = time(NULL);
|
||||
time_t start_time = time(nullptr);
|
||||
int buffer_len = 0;
|
||||
while ( !( buffer_len = ReadData(buffer) ) ) {
|
||||
if ( zm_terminate || ( start_time - time(NULL) < ZM_WATCH_MAX_DELAY ))
|
||||
if ( zm_terminate || ( start_time - time(nullptr) < ZM_WATCH_MAX_DELAY ))
|
||||
return -1;
|
||||
Debug(4, "Timeout waiting for REGEXP HEADER");
|
||||
usleep(100000);
|
||||
|
@ -308,16 +308,16 @@ int RemoteCameraHttp::GetResponse() {
|
|||
int buffer_len;
|
||||
#if HAVE_LIBPCRE
|
||||
if ( method == REGEXP ) {
|
||||
const char *header = 0;
|
||||
const char *header = nullptr;
|
||||
int header_len = 0;
|
||||
const char *http_version = 0;
|
||||
const char *http_version = nullptr;
|
||||
int status_code = 0;
|
||||
const char *status_mesg = 0;
|
||||
const char *status_mesg = nullptr;
|
||||
const char *connection_type = "";
|
||||
int content_length = 0;
|
||||
const char *content_type = "";
|
||||
const char *content_boundary = "";
|
||||
const char *subheader = 0;
|
||||
const char *subheader = nullptr;
|
||||
int subheader_len = 0;
|
||||
//int subcontent_length = 0;
|
||||
//const char *subcontent_type = "";
|
||||
|
@ -450,9 +450,9 @@ int RemoteCameraHttp::GetResponse() {
|
|||
}
|
||||
case SUBHEADER :
|
||||
{
|
||||
static RegExpr *subheader_expr = 0;
|
||||
static RegExpr *subcontent_length_expr = 0;
|
||||
static RegExpr *subcontent_type_expr = 0;
|
||||
static RegExpr *subheader_expr = nullptr;
|
||||
static RegExpr *subcontent_length_expr = nullptr;
|
||||
static RegExpr *subcontent_type_expr = nullptr;
|
||||
|
||||
if ( !subheader_expr )
|
||||
{
|
||||
|
@ -641,11 +641,11 @@ int RemoteCameraHttp::GetResponse() {
|
|||
case HEADER :
|
||||
{
|
||||
n_headers = 0;
|
||||
http_header = 0;
|
||||
connection_header = 0;
|
||||
content_length_header = 0;
|
||||
content_type_header = 0;
|
||||
authenticate_header = 0;
|
||||
http_header = nullptr;
|
||||
connection_header = nullptr;
|
||||
content_length_header = nullptr;
|
||||
content_type_header = nullptr;
|
||||
authenticate_header = nullptr;
|
||||
|
||||
http_version[0] = '\0';
|
||||
status_code [0]= '\0';
|
||||
|
@ -665,7 +665,7 @@ int RemoteCameraHttp::GetResponse() {
|
|||
}
|
||||
bytes += buffer_len;
|
||||
|
||||
char *crlf = 0;
|
||||
char *crlf = nullptr;
|
||||
char *header_ptr = (char *)buffer;
|
||||
int header_len = buffer.size();
|
||||
bool all_headers = false;
|
||||
|
@ -870,7 +870,7 @@ int RemoteCameraHttp::GetResponse() {
|
|||
}
|
||||
case SUBHEADERCONT :
|
||||
{
|
||||
char *crlf = 0;
|
||||
char *crlf = nullptr;
|
||||
char *subheader_ptr = (char *)buffer;
|
||||
int subheader_len = buffer.size();
|
||||
bool all_headers = false;
|
||||
|
|
|
@ -64,18 +64,18 @@ RemoteCameraRtsp::RemoteCameraRtsp(
|
|||
Initialise();
|
||||
}
|
||||
|
||||
mFormatContext = NULL;
|
||||
mFormatContext = nullptr;
|
||||
mVideoStreamId = -1;
|
||||
mAudioStreamId = -1;
|
||||
mCodecContext = NULL;
|
||||
mCodec = NULL;
|
||||
mRawFrame = NULL;
|
||||
mFrame = NULL;
|
||||
mCodecContext = nullptr;
|
||||
mCodec = nullptr;
|
||||
mRawFrame = nullptr;
|
||||
mFrame = nullptr;
|
||||
frameCount = 0;
|
||||
startTime=0;
|
||||
|
||||
#if HAVE_LIBSWSCALE
|
||||
mConvertContext = NULL;
|
||||
mConvertContext = nullptr;
|
||||
#endif
|
||||
/* Has to be located inside the constructor so other components such as zma will receive correct colours and subpixel order */
|
||||
if ( colours == ZM_COLOUR_RGB32 ) {
|
||||
|
@ -99,13 +99,13 @@ RemoteCameraRtsp::~RemoteCameraRtsp() {
|
|||
#if HAVE_LIBSWSCALE
|
||||
if ( mConvertContext ) {
|
||||
sws_freeContext(mConvertContext);
|
||||
mConvertContext = NULL;
|
||||
mConvertContext = nullptr;
|
||||
}
|
||||
#endif
|
||||
|
||||
if ( mCodecContext ) {
|
||||
avcodec_close(mCodecContext);
|
||||
mCodecContext = NULL; // Freed by avformat_free_context in the destructor of RtspThread class
|
||||
mCodecContext = nullptr; // Freed by avformat_free_context in the destructor of RtspThread class
|
||||
}
|
||||
|
||||
if ( capture ) {
|
||||
|
@ -144,7 +144,7 @@ int RemoteCameraRtsp::Disconnect() {
|
|||
rtspThread->stop();
|
||||
rtspThread->join();
|
||||
delete rtspThread;
|
||||
rtspThread = 0;
|
||||
rtspThread = nullptr;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
@ -197,7 +197,7 @@ int RemoteCameraRtsp::PrimeCapture() {
|
|||
|
||||
// Find the decoder for the video stream
|
||||
mCodec = avcodec_find_decoder(mCodecContext->codec_id);
|
||||
if ( mCodec == NULL )
|
||||
if ( mCodec == nullptr )
|
||||
Panic("Unable to locate codec %d decoder", mCodecContext->codec_id);
|
||||
|
||||
// Open codec
|
||||
|
@ -214,7 +214,7 @@ int RemoteCameraRtsp::PrimeCapture() {
|
|||
// Allocate space for the converted video frame
|
||||
mFrame = zm_av_frame_alloc();
|
||||
|
||||
if ( mRawFrame == NULL || mFrame == NULL )
|
||||
if ( mRawFrame == nullptr || mFrame == nullptr )
|
||||
Fatal("Unable to allocate frame(s)");
|
||||
|
||||
#if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0)
|
||||
|
@ -261,7 +261,7 @@ int RemoteCameraRtsp::Capture( Image &image ) {
|
|||
|
||||
/* Request a writeable buffer of the target image */
|
||||
directbuffer = image.WriteBuffer(width, height, colours, subpixelorder);
|
||||
if ( directbuffer == NULL ) {
|
||||
if ( directbuffer == nullptr ) {
|
||||
Error("Failed requesting writeable buffer for the captured image.");
|
||||
return -1;
|
||||
}
|
||||
|
@ -343,12 +343,12 @@ int RemoteCameraRtsp::Capture( Image &image ) {
|
|||
#endif
|
||||
|
||||
#if HAVE_LIBSWSCALE
|
||||
if ( mConvertContext == NULL ) {
|
||||
if ( mConvertContext == nullptr ) {
|
||||
mConvertContext = sws_getContext(
|
||||
mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt,
|
||||
width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL);
|
||||
width, height, imagePixFormat, SWS_BICUBIC, nullptr, nullptr, nullptr);
|
||||
|
||||
if ( mConvertContext == NULL )
|
||||
if ( mConvertContext == nullptr )
|
||||
Fatal("Unable to create conversion context");
|
||||
|
||||
if (
|
||||
|
|
|
@ -272,12 +272,12 @@ int RtpCtrlThread::run() {
|
|||
|
||||
unsigned char buffer[ZM_NETWORK_BUFSIZ];
|
||||
|
||||
time_t last_receive = time(NULL);
|
||||
time_t last_receive = time(nullptr);
|
||||
bool timeout = false; // used as a flag that we had a timeout, and then sent an RR to see if we wake back up. Real timeout will happen when this is true.
|
||||
|
||||
while ( !mStop && select.wait() >= 0 ) {
|
||||
|
||||
time_t now = time(NULL);
|
||||
time_t now = time(nullptr);
|
||||
Select::CommsList readable = select.getReadable();
|
||||
if ( readable.size() == 0 ) {
|
||||
if ( ! timeout ) {
|
||||
|
@ -300,7 +300,7 @@ int RtpCtrlThread::run() {
|
|||
}
|
||||
} else {
|
||||
timeout = false;
|
||||
last_receive = time(NULL);
|
||||
last_receive = time(nullptr);
|
||||
}
|
||||
for ( Select::CommsList::iterator iter = readable.begin(); iter != readable.end(); ++iter ) {
|
||||
if ( UdpInetSocket *socket = dynamic_cast<UdpInetSocket *>(*iter) ) {
|
||||
|
|
|
@@ -198,14 +198,14 @@ RtspThread::~RtspThread() {
 #else
     av_free_format_context(mFormatContext);
 #endif
-    mFormatContext = NULL;
+    mFormatContext = nullptr;
   }
   if ( mSessDesc ) {
     delete mSessDesc;
-    mSessDesc = NULL;
+    mSessDesc = nullptr;
   }
   delete mAuthenticator;
-  mAuthenticator = NULL;
+  mAuthenticator = nullptr;
 }

 int RtspThread::run() {

@@ -387,7 +387,7 @@ int RtspThread::run() {
   if ( !mAuth.empty() )
     authUrl.insert( authUrl.find( "://" )+3, mAuth+"@" );

-  if ( av_open_input_file( &mFormatContext, authUrl.c_str(), NULL, 0, NULL ) != 0 )
+  if ( av_open_input_file( &mFormatContext, authUrl.c_str(), nullptr, 0, nullptr ) != 0 )
   {
     Error( "Unable to open input '%s'", authUrl.c_str() );
     return( -1 );

@@ -499,26 +499,26 @@ int RtspThread::run() {
         method = "RTP/UNICAST";
         StringVector subparts = split( parts[i], "=" );
         StringVector ports = split( subparts[1], "-" );
-        remotePorts[0] = strtol( ports[0].c_str(), NULL, 10 );
-        remotePorts[1] = strtol( ports[1].c_str(), NULL, 10 );
+        remotePorts[0] = strtol( ports[0].c_str(), nullptr, 10 );
+        remotePorts[1] = strtol( ports[1].c_str(), nullptr, 10 );
       } else if ( startsWith( parts[i], "interleaved=" ) ) {
         method = "RTP/RTSP";
         StringVector subparts = split( parts[i], "=" );
         StringVector channels = split( subparts[1], "-" );
-        remoteChannels[0] = strtol( channels[0].c_str(), NULL, 10 );
-        remoteChannels[1] = strtol( channels[1].c_str(), NULL, 10 );
+        remoteChannels[0] = strtol( channels[0].c_str(), nullptr, 10 );
+        remoteChannels[1] = strtol( channels[1].c_str(), nullptr, 10 );
       } else if ( startsWith( parts[i], "port=" ) ) {
         method = "RTP/MULTICAST";
         StringVector subparts = split( parts[i], "=" );
         StringVector ports = split( subparts[1], "-" );
-        localPorts[0] = strtol( ports[0].c_str(), NULL, 10 );
-        localPorts[1] = strtol( ports[1].c_str(), NULL, 10 );
+        localPorts[0] = strtol( ports[0].c_str(), nullptr, 10 );
+        localPorts[1] = strtol( ports[1].c_str(), nullptr, 10 );
       } else if ( startsWith( parts[i], "destination=" ) ) {
         StringVector subparts = split( parts[i], "=" );
         localHost = subparts[1];
       } else if ( startsWith( parts[i], "ssrc=" ) ) {
         StringVector subparts = split( parts[i], "=" );
-        ssrc = strtoll( subparts[1].c_str(), NULL, 16 );
+        ssrc = strtoll( subparts[1].c_str(), nullptr, 16 );
       }
     }

@@ -568,10 +568,10 @@ int RtspThread::run() {
     for ( size_t j = 0; j < parts.size(); j++ ) {
       if ( startsWith( parts[j], "seq=" ) ) {
         StringVector subparts = split( parts[j], "=" );
-        seq = strtol( subparts[1].c_str(), NULL, 10 );
+        seq = strtol( subparts[1].c_str(), nullptr, 10 );
       } else if ( startsWith( parts[j], "rtptime=" ) ) {
         StringVector subparts = split( parts[j], "=" );
-        rtpTime = strtol( subparts[1].c_str(), NULL, 10 );
+        rtpTime = strtol( subparts[1].c_str(), nullptr, 10 );
       }
     }
     break;

@@ -582,7 +582,7 @@ int RtspThread::run() {
   Debug( 2, "RTSP Seq is %d", seq );
   Debug( 2, "RTSP Rtptime is %ld", rtpTime );

-  time_t lastKeepalive = time(NULL);
+  time_t lastKeepalive = time(nullptr);
   time_t now;
   message = "GET_PARAMETER "+mUrl+" RTSP/1.0\r\nSession: "+session+"\r\n";

@@ -598,7 +598,7 @@ int RtspThread::run() {
     rtpCtrlThread.start();

     while( !mStop ) {
-      now = time(NULL);
+      now = time(nullptr);
       // Send a keepalive message if the server supports this feature and we are close to the timeout expiration
       Debug(5, "sendkeepalive %d, timeout %d, now: %d last: %d since: %d",
           sendKeepalive, timeout, now, lastKeepalive, (now-lastKeepalive) );

@@ -721,7 +721,7 @@ int RtspThread::run() {
       }
       // Send a keepalive message if the server supports this feature and we are close to the timeout expiration
       // FIXME: Is this really necessary when using tcp ?
-      now = time(NULL);
+      now = time(nullptr);
       // Send a keepalive message if the server supports this feature and we are close to the timeout expiration
Debug(5, "sendkeepalive %d, timeout %d, now: %d last: %d since: %d", sendKeepalive, timeout, now, lastKeepalive, (now-lastKeepalive) );
       if ( sendKeepalive && (timeout > 0) && ((now-lastKeepalive) > (timeout-5)) )

@@ -761,10 +761,10 @@ Debug(5, "sendkeepalive %d, timeout %d, now: %d last: %d since: %d", sendKeepali

     while ( !mStop ) {
       // Send a keepalive message if the server supports this feature and we are close to the timeout expiration
-      if ( sendKeepalive && (timeout > 0) && ((time(NULL)-lastKeepalive) > (timeout-5)) ) {
+      if ( sendKeepalive && (timeout > 0) && ((time(nullptr)-lastKeepalive) > (timeout-5)) ) {
         if ( !sendCommand( message ) )
           return -1;
-        lastKeepalive = time(NULL);
+        lastKeepalive = time(nullptr);
       }
       usleep(100000);
     }
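In the transport-header parsing above, the second argument of strtol()/strtoll() is an optional out-parameter for the end-of-number position; passing nullptr simply discards it. A small self-contained sketch (the values are made up) showing both styles:

#include <cstdlib>
#include <cstdio>

int main() {
  // Discard the end pointer, as the RTSP parsing code does:
  long port = std::strtol("5004", nullptr, 10);

  // Or keep it to detect trailing junk after the number:
  char *end = nullptr;
  long bad = std::strtol("5005x", &end, 10);
  std::printf("port=%ld bad=%ld trailing=\"%s\"\n", port, bad, end);
  return 0;
}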
@@ -152,16 +152,16 @@ SessionDescriptor::MediaDescriptor::MediaDescriptor(
   mWidth( 0 ),
   mHeight( 0 ),
   mSprops( "" ),
-  mConnInfo( 0 )
+  mConnInfo( nullptr )
 {
 }

 SessionDescriptor::SessionDescriptor( const std::string &url, const std::string &sdp ) :
   mUrl( url ),
-  mConnInfo( 0 ),
-  mBandInfo( 0 )
+  mConnInfo( nullptr ),
+  mBandInfo( nullptr )
 {
-  MediaDescriptor *currMedia = 0;
+  MediaDescriptor *currMedia = nullptr;

   StringVector lines = split( sdp, "\r\n" );
   for ( StringVector::const_iterator iter = lines.begin(); iter != lines.end(); ++iter ) {

@@ -344,12 +344,12 @@ AVFormatContext *SessionDescriptor::generateFormatContext() const {
 #if !LIBAVFORMAT_VERSION_CHECK(53, 10, 0, 17, 0)
     AVStream *stream = av_new_stream(formatContext, i);
 #else
-    AVStream *stream = avformat_new_stream(formatContext, NULL);
+    AVStream *stream = avformat_new_stream(formatContext, nullptr);
     stream->id = i;
 #endif

 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
-    AVCodecContext *codec_context = avcodec_alloc_context3(NULL);
+    AVCodecContext *codec_context = avcodec_alloc_context3(nullptr);
     avcodec_parameters_to_context(codec_context, stream->codecpar);
     stream->codec = codec_context;
 #else

@@ -434,7 +434,7 @@ AVFormatContext *SessionDescriptor::generateFormatContext() const {
     if ( codec_context->codec_id == AV_CODEC_ID_H264 && mediaDesc->getSprops().size()) {
       uint8_t start_sequence[]= { 0, 0, 1 };
       codec_context->extradata_size= 0;
-      codec_context->extradata= NULL;
+      codec_context->extradata= nullptr;
       char pvalue[1024], *value = pvalue;

       strcpy(pvalue, mediaDesc->getSprops().c_str());

@@ -211,7 +211,7 @@ public:
   MediaDescriptor *getStream( int index )
   {
     if ( index < 0 || (unsigned int)index >= mMediaList.size() )
-      return( 0 );
+      return nullptr;
     return( mMediaList[index] );
   }
@@ -22,7 +22,7 @@ int zm_sendfile(int out_fd, int in_fd, off_t *offset, size_t size) {
 #include <sys/uio.h>
 int zm_sendfile(int out_fd, int in_fd, off_t *offset, off_t size) {
   int err;
-  err = sendfile(in_fd, out_fd, *offset, size, NULL, &size, 0);
+  err = sendfile(in_fd, out_fd, *offset, size, nullptr, &size, 0);
   if (err && errno != EAGAIN)
     return -errno;
@@ -53,8 +53,8 @@ RETSIGTYPE zm_die_handler(int signal)
 #if (defined(__i386__) || defined(__x86_64__))
   // Get more information if available
   #if ( HAVE_SIGINFO_T && HAVE_UCONTEXT_T )
-  void *ip = 0;
-  void *cr2 = 0;
+  void *ip = nullptr;
+  void *cr2 = nullptr;
   if (info && context) {

     Debug(1,
@@ -33,7 +33,7 @@ StreamBase::~StreamBase() {
 #if HAVE_LIBAVCODEC
   if ( vid_stream ) {
     delete vid_stream;
-    vid_stream = NULL;
+    vid_stream = nullptr;
   }
 #endif
   closeComms();

@@ -351,7 +351,7 @@ void StreamBase::openComms() {
     strncpy(rem_addr.sun_path, rem_sock_path, sizeof(rem_addr.sun_path));
     rem_addr.sun_family = AF_UNIX;

-    gettimeofday(&last_comm_update, NULL);
+    gettimeofday(&last_comm_update, nullptr);
   } // end if connKey > 0
   Debug(3, "comms open at %s", loc_sock_path);
 } // end void StreamBase::openComms()
@@ -24,7 +24,7 @@
 #include "zm_swscale.h"

 #if HAVE_LIBSWSCALE && HAVE_LIBAVUTIL
-SWScale::SWScale() : gotdefaults(false), swscale_ctx(NULL), input_avframe(NULL), output_avframe(NULL) {
+SWScale::SWScale() : gotdefaults(false), swscale_ctx(nullptr), input_avframe(nullptr), output_avframe(nullptr) {
   Debug(4,"SWScale object created");

 }

@@ -36,7 +36,7 @@ bool SWScale::init() {
 #else
   input_avframe = avcodec_alloc_frame();
 #endif
-  if ( input_avframe == NULL ) {
+  if ( input_avframe == nullptr ) {
     Error("Failed allocating AVFrame for the input");
     return false;
   }

@@ -47,7 +47,7 @@ bool SWScale::init() {
 #else
   output_avframe = avcodec_alloc_frame();
 #endif
-  if ( output_avframe == NULL ) {
+  if ( output_avframe == nullptr ) {
     Error("Failed allocating AVFrame for the output");
     return false;
   }

@@ -65,7 +65,7 @@ SWScale::~SWScale() {

   if ( swscale_ctx ) {
     sws_freeContext(swscale_ctx);
-    swscale_ctx = NULL;
+    swscale_ctx = nullptr;
   }

   Debug(4,"SWScale object destroyed");

@@ -86,7 +86,7 @@ int SWScale::SetDefaults(enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf,

 int SWScale::Convert(const uint8_t* in_buffer, const size_t in_buffer_size, uint8_t* out_buffer, const size_t out_buffer_size, enum _AVPIXELFORMAT in_pf, enum _AVPIXELFORMAT out_pf, unsigned int width, unsigned int height, unsigned int new_width, unsigned int new_height) {
   /* Parameter checking */
-  if(in_buffer == NULL || out_buffer == NULL) {
+  if(in_buffer == nullptr || out_buffer == nullptr) {
     Error("NULL Input or output buffer");
     return -1;
   }

@@ -131,8 +131,8 @@ int SWScale::Convert(const uint8_t* in_buffer, const size_t in_buffer_size, uint
   }

   /* Get the context */
-  swscale_ctx = sws_getCachedContext( swscale_ctx, width, height, in_pf, new_width, new_height, out_pf, SWS_FAST_BILINEAR, NULL, NULL, NULL );
-  if(swscale_ctx == NULL) {
+  swscale_ctx = sws_getCachedContext( swscale_ctx, width, height, in_pf, new_width, new_height, out_pf, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr );
+  if(swscale_ctx == nullptr) {
     Error("Failed getting swscale context");
     return -6;
   }
@@ -30,7 +30,7 @@
 struct timespec getTimeout( int secs ) {
   struct timespec timeout;
   struct timeval temp_timeout;
-  gettimeofday(&temp_timeout, 0);
+  gettimeofday(&temp_timeout, nullptr);
   timeout.tv_sec = temp_timeout.tv_sec + secs;
   timeout.tv_nsec = temp_timeout.tv_usec*1000;
   return timeout;

@@ -39,7 +39,7 @@ struct timespec getTimeout( int secs ) {
 struct timespec getTimeout( double secs ) {
   struct timespec timeout;
   struct timeval temp_timeout;
-  gettimeofday( &temp_timeout, 0 );
+  gettimeofday( &temp_timeout, nullptr );
   timeout.tv_sec = temp_timeout.tv_sec + int(secs);
   timeout.tv_nsec = temp_timeout.tv_usec += (long int)(1000000000.0*(secs-int(secs)));
   if ( timeout.tv_nsec > 1000000000 ) {

@@ -50,7 +50,7 @@ struct timespec getTimeout( double secs ) {
 }

 Mutex::Mutex() {
-  if ( pthread_mutex_init(&mMutex, NULL) < 0 )
+  if ( pthread_mutex_init(&mMutex, nullptr) < 0 )
     Error("Unable to create pthread mutex: %s", strerror(errno));
 }

@@ -105,7 +105,7 @@ RecursiveMutex::RecursiveMutex() {
 }

 Condition::Condition( Mutex &mutex ) : mMutex( mutex ) {
-  if ( pthread_cond_init( &mCondition, NULL ) < 0 )
+  if ( pthread_cond_init( &mCondition, nullptr ) < 0 )
     throw ThreadException( stringtf( "Unable to create pthread condition: %s", strerror(errno) ) );
 }
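For the Mutex/Condition wrappers above, the nullptr argument asks pthreads for default attributes. A minimal standalone sketch, not taken from ZoneMinder, noting that pthread_mutex_init() reports failure through its return value rather than errno:

#include <pthread.h>
#include <cstdio>
#include <cstring>

int main() {
  pthread_mutex_t mutex;
  // nullptr attributes request a default (non-recursive) mutex
  int rc = pthread_mutex_init(&mutex, nullptr);
  if ( rc != 0 ) {
    std::fprintf(stderr, "Unable to create pthread mutex: %s\n", std::strerror(rc));
    return 1;
  }
  pthread_mutex_lock(&mutex);
  pthread_mutex_unlock(&mutex);
  pthread_mutex_destroy(&mutex);
  return 0;
}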
@@ -86,7 +86,7 @@ inline int tvDiffUsec( struct timeval first, struct timeval last )
 inline int tvDiffUsec( struct timeval first )
 {
   struct timeval now;
-  gettimeofday( &now, NULL );
+  gettimeofday( &now, nullptr );
   return( tvDiffUsec( first, now ) );
 }

@@ -98,7 +98,7 @@ inline int tvDiffMsec( struct timeval first, struct timeval last )
 inline int tvDiffMsec( struct timeval first )
 {
   struct timeval now;
-  gettimeofday( &now, NULL );
+  gettimeofday( &now, nullptr );
   return( tvDiffMsec( first, now ) );
 }

@@ -110,7 +110,7 @@ inline double tvDiffSec( struct timeval first, struct timeval last )
 inline double tvDiffSec( struct timeval first )
 {
   struct timeval now;
-  gettimeofday( &now, NULL );
+  gettimeofday( &now, nullptr );
   return( tvDiffSec( first, now ) );
 }

@@ -146,7 +146,7 @@ inline int tvEq( struct timeval t1, struct timeval t2 )
 inline struct timeval tvNow( void )
 {
   struct timeval t;
-  gettimeofday( &t, NULL );
+  gettimeofday( &t, nullptr );
   return( t );
 }
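The second argument of gettimeofday() is the obsolete timezone pointer, which callers are expected to leave null; that is all these helpers pass. A minimal standalone sketch of a tvNow()-style helper (assumes a POSIX system, not copied from the ZoneMinder headers):

#include <sys/time.h>
#include <cstdio>

static struct timeval tvNow() {
  struct timeval t;
  gettimeofday(&t, nullptr);  // nullptr: the timezone argument is obsolete
  return t;
}

int main() {
  struct timeval t = tvNow();
  std::printf("%ld.%06ld\n", static_cast<long>(t.tv_sec), static_cast<long>(t.tv_usec));
  return 0;
}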
@@ -116,7 +116,7 @@ User *zmLoadUser(const char *username, const char *password) {
       " FROM `Users` WHERE `Username` = '%s' AND `Enabled` = 1",
       safer_username);
   delete[] safer_username;
-  safer_username = NULL;
+  safer_username = nullptr;

   if ( mysql_query(&dbconn, sql) ) {
     Error("Can't run query: %s", mysql_error(&dbconn));

@@ -145,7 +145,7 @@ User *zmLoadUser(const char *username, const char *password) {
   mysql_free_result(result);

   Warning("Unable to authenticate user %s", username);
-  return NULL;
+  return nullptr;
 } // end User *zmLoadUser(const char *username, const char *password)

 User *zmLoadTokenUser(std::string jwt_token_str, bool use_remote_addr) {

@@ -169,7 +169,7 @@ User *zmLoadTokenUser(std::string jwt_token_str, bool use_remote_addr) {
   Debug(1, "retrieved user '%s' from token", username.c_str());

   if ( username == "" ) {
-    return NULL;
+    return nullptr;
   }

   char sql[ZM_SQL_MED_BUFSIZ] = "";

@@ -180,30 +180,30 @@ User *zmLoadTokenUser(std::string jwt_token_str, bool use_remote_addr) {

   if ( mysql_query(&dbconn, sql) ) {
     Error("Can't run query: %s", mysql_error(&dbconn));
-    return NULL;
+    return nullptr;
   }

   MYSQL_RES *result = mysql_store_result(&dbconn);
   if ( !result ) {
     Error("Can't use query result: %s", mysql_error(&dbconn));
-    return NULL;
+    return nullptr;
   }

   int n_users = mysql_num_rows(result);
   if ( n_users != 1 ) {
     mysql_free_result(result);
     Error("Unable to authenticate user '%s'", username.c_str());
-    return NULL;
+    return nullptr;
   }

   MYSQL_ROW dbrow = mysql_fetch_row(result);
   User *user = new User(dbrow);
-  unsigned int stored_iat = strtoul(dbrow[10], NULL, 0);
+  unsigned int stored_iat = strtoul(dbrow[10], nullptr, 0);

   if ( stored_iat > iat ) { // admin revoked tokens
     mysql_free_result(result);
     Error("Token was revoked for '%s'", username.c_str());
-    return NULL;
+    return nullptr;
   }

   Debug(1, "Authenticated user '%s' via token with last revoke time: %u",

@@ -248,16 +248,16 @@ User *zmLoadAuthUser(const char *auth, bool use_remote_addr) {
   MYSQL_RES *result = mysql_store_result(&dbconn);
   if ( !result ) {
     Error("Can't use query result: %s", mysql_error(&dbconn));
-    return NULL;
+    return nullptr;
   }
   int n_users = mysql_num_rows(result);
   if ( n_users < 1 ) {
     mysql_free_result(result);
     Warning("Unable to authenticate user");
-    return NULL;
+    return nullptr;
   }

-  time_t now = time(0);
+  time_t now = time(nullptr);
   unsigned int hours = config.auth_hash_ttl;
   if ( ! hours ) {
     Warning("No value set for ZM_AUTH_HASH_TTL. Defaulting to 2.");

@@ -323,7 +323,7 @@ User *zmLoadAuthUser(const char *auth, bool use_remote_addr) {
   Error("You need to build with gnutls or openssl to use hash based auth");
 #endif // HAVE_DECL_MD5 || HAVE_DECL_GNUTLS_FINGERPRINT
   Debug(1, "No user found for auth_key %s", auth);
-  return NULL;
+  return nullptr;
 } // end User *zmLoadAuthUser( const char *auth, bool use_remote_addr )

 // Function to check Username length
@@ -185,7 +185,7 @@ const std::string base64Encode(const std::string &inString) {
 }

 int split(const char* string, const char delim, std::vector<std::string>& items) {
-  if ( string == NULL )
+  if ( string == nullptr )
     return -1;

   if ( string[0] == 0 )

@@ -206,7 +206,7 @@ int split(const char* string, const char delim, std::vector<std::string>& items)
 }

 int pairsplit(const char* string, const char delim, std::string& name, std::string& value) {
-  if ( string == NULL )
+  if ( string == nullptr )
     return -1;

   if ( string[0] == 0 )
@@ -57,7 +57,7 @@ int VideoWriter::Reset(const char* new_path) {
   /* Common variables reset */

   /* If there is a new path, use it */
-  if ( new_path != NULL ) {
+  if ( new_path != nullptr ) {
     path = new_path;
   }

@@ -116,7 +116,7 @@ X264MP4Writer::X264MP4Writer(
   }

   /* If supplied with user parameters to the encoder, copy them */
-  if ( p_user_params != NULL ) {
+  if ( p_user_params != nullptr ) {
     user_params = *p_user_params;
   }

@@ -144,7 +144,7 @@ X264MP4Writer::~X264MP4Writer() {
 int X264MP4Writer::Open() {
   /* Open the encoder */
   x264enc = x264_encoder_open(&x264params);
-  if ( x264enc == NULL ) {
+  if ( x264enc == nullptr ) {
     Error("Failed opening x264 encoder");
     return -1;
   }

@@ -269,7 +269,7 @@ int X264MP4Writer::Encode(
     const size_t data_size,
     const unsigned int frame_time) {
   /* Parameter checking */
-  if ( data == NULL ) {
+  if ( data == nullptr ) {
     Error("NULL buffer");
     return -1;
   }

@@ -418,7 +418,7 @@ int X264MP4Writer::x264encodeloop(bool bFlush) {
   int frame_size;

   if ( bFlush ) {
-    frame_size = x264_encoder_encode(x264enc, &nals, &i_nals, NULL, &x264picout);
+    frame_size = x264_encoder_encode(x264enc, &nals, &i_nals, nullptr, &x264picout);
   } else {
     frame_size = x264_encoder_encode(x264enc, &nals, &i_nals, &x264picin, &x264picout);
   }

@@ -515,12 +515,12 @@ int ParseEncoderParameters(
     const char* str,
     std::vector<EncoderParameter_t>* vec
     ) {
-  if ( vec == NULL ) {
+  if ( vec == nullptr ) {
     Error("NULL Encoder parameters vector pointer");
     return -1;
   }

-  if ( str == NULL ) {
+  if ( str == nullptr ) {
     Error("NULL Encoder parameters string");
     return -2;
   }

@@ -81,7 +81,7 @@ public:
   virtual int Encode(const Image* img, const unsigned int frame_time) = 0;
   virtual int Open() = 0;
   virtual int Close() = 0;
-  virtual int Reset(const char* new_path = NULL);
+  virtual int Reset(const char* new_path = nullptr);

   const char* GetContainer() const {
     return container.c_str();

@@ -160,13 +160,13 @@ protected:

 public:
-  X264MP4Writer(const char* p_path, const unsigned int p_width, const unsigned int p_height, const unsigned int p_colours, const unsigned int p_subpixelorder, const std::vector<EncoderParameter_t>* p_user_params = NULL);
+  X264MP4Writer(const char* p_path, const unsigned int p_width, const unsigned int p_height, const unsigned int p_colours, const unsigned int p_subpixelorder, const std::vector<EncoderParameter_t>* p_user_params = nullptr);
   ~X264MP4Writer();
   int Encode(const uint8_t* data, const size_t data_size, const unsigned int frame_time);
   int Encode(const Image* img, const unsigned int frame_time);
   int Open();
   int Close();
-  int Reset(const char* new_path = NULL);
+  int Reset(const char* new_path = nullptr);

 };
 #endif // HAVE_LIBX264 && HAVE_LIBMP4V2 && HAVE_LIBAVUTIL && HAVE_LIBSWSCALE
@@ -62,7 +62,7 @@ VideoStore::VideoStore(

   Info("Opening video storage stream %s format: %s", filename, format);

-  int ret = avformat_alloc_output_context2(&oc, NULL, NULL, filename);
+  int ret = avformat_alloc_output_context2(&oc, nullptr, nullptr, filename);
   if ( ret < 0 ) {
     Warning(
         "Could not create video storage stream %s as no out ctx"

@@ -74,7 +74,7 @@ VideoStore::VideoStore(

   // Couldn't deduce format from filename, trying from format name
   if ( !oc ) {
-    avformat_alloc_output_context2(&oc, NULL, format, filename);
+    avformat_alloc_output_context2(&oc, nullptr, format, filename);
     if ( !oc ) {
       Error(
           "Could not create video storage stream %s as no out ctx"

@@ -86,7 +86,7 @@ VideoStore::VideoStore(
     }
   } // end if ! oc

-  AVDictionary *pmetadata = NULL;
+  AVDictionary *pmetadata = nullptr;
   int dsr =
       av_dict_set(&pmetadata, "title", "Zoneminder Security Recording", 0);
   if ( dsr < 0 ) Warning("%s:%d: title set failed", __FILE__, __LINE__);

@@ -104,7 +104,7 @@ VideoStore::VideoStore(
 #endif
   }

-  video_out_stream = avformat_new_stream(oc, NULL);
+  video_out_stream = avformat_new_stream(oc, nullptr);
   if ( !video_out_stream ) {
     Error("Unable to create video out stream");
     return;

@@ -231,11 +231,11 @@ VideoStore::VideoStore(
           video_out_codec->name,
           av_make_error_string(ret).c_str()
           );
-      video_out_codec = NULL;
+      video_out_codec = nullptr;
     }

-    AVDictionaryEntry *e = NULL;
-    while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != NULL ) {
+    AVDictionaryEntry *e = nullptr;
+    while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != nullptr ) {
       Warning("Encoder Option %s not recognized by ffmpeg codec", e->key);
     }
     ret = avcodec_parameters_from_context(video_out_stream->codecpar, video_out_ctx);

@@ -275,16 +275,16 @@ VideoStore::VideoStore(
     }
   }

-  converted_in_samples = NULL;
-  audio_out_codec = NULL;
-  audio_in_codec = NULL;
-  audio_in_ctx = NULL;
-  audio_out_stream = NULL;
-  in_frame = NULL;
-  out_frame = NULL;
+  converted_in_samples = nullptr;
+  audio_out_codec = nullptr;
+  audio_in_codec = nullptr;
+  audio_in_ctx = nullptr;
+  audio_out_stream = nullptr;
+  in_frame = nullptr;
+  out_frame = nullptr;
 #if defined(HAVE_LIBSWRESAMPLE) || defined(HAVE_LIBAVRESAMPLE)
-  resample_ctx = NULL;
-  fifo = NULL;
+  resample_ctx = nullptr;
+  fifo = nullptr;
 #endif
   video_first_pts = 0;
   video_first_dts = 0;
@@ -310,11 +310,11 @@ VideoStore::VideoStore(
     }

 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
-    audio_out_stream = avformat_new_stream(oc, NULL);
+    audio_out_stream = avformat_new_stream(oc, nullptr);
     audio_out_ctx = avcodec_alloc_context3(audio_out_codec);
     if ( !audio_out_ctx ) {
       Error("could not allocate codec ctx for AAC");
-      audio_out_stream = NULL;
+      audio_out_stream = nullptr;
       return;
     }
 #else

@@ -329,7 +329,7 @@ VideoStore::VideoStore(
   } else {
     Debug(2, "Got AAC");

-    audio_out_stream = avformat_new_stream(oc, NULL);
+    audio_out_stream = avformat_new_stream(oc, nullptr);
     if ( !audio_out_stream ) {
       Error("Could not allocate new stream");
       return;

@@ -366,7 +366,7 @@ VideoStore::VideoStore(
     if ( ret < 0 ) {
       Error("Unable to copy audio ctx %s",
             av_make_error_string(ret).c_str());
-      audio_out_stream = NULL;
+      audio_out_stream = nullptr;
       return;
     } // end if
     audio_out_ctx->codec_tag = 0;

@@ -403,7 +403,7 @@ bool VideoStore::open() {
   int ret;
   /* open the out file, if needed */
   if ( !(out_format->flags & AVFMT_NOFILE) ) {
-    ret = avio_open2(&oc->pb, filename, AVIO_FLAG_WRITE, NULL, NULL);
+    ret = avio_open2(&oc->pb, filename, AVIO_FLAG_WRITE, nullptr, nullptr);
     if ( ret < 0 ) {
       Error("Could not open out file '%s': %s", filename,
             av_make_error_string(ret).c_str());

@@ -414,7 +414,7 @@ bool VideoStore::open() {
   zm_dump_stream_format(oc, 0, 0, 1);
   if ( audio_out_stream ) zm_dump_stream_format(oc, 1, 0, 1);

-  AVDictionary *opts = NULL;
+  AVDictionary *opts = nullptr;

   std::string option_string = monitor->GetOptEncoderParams();
   ret = av_dict_parse_string(&opts, option_string.c_str(), "=", ",\n", 0);

@@ -422,7 +422,7 @@ bool VideoStore::open() {
     Warning("Could not parse ffmpeg output options '%s'", option_string.c_str());
   }

-  const AVDictionaryEntry *movflags_entry = av_dict_get(opts, "movflags", NULL, AV_DICT_MATCH_CASE);
+  const AVDictionaryEntry *movflags_entry = av_dict_get(opts, "movflags", nullptr, AV_DICT_MATCH_CASE);
   if ( !movflags_entry ) {
     Debug(1, "setting movflags to frag_keyframe+empty_moov");
     // av_dict_set(&opts, "movflags", "frag_custom+dash+delay_moov", 0);

@@ -435,14 +435,14 @@ bool VideoStore::open() {
   }
   if ( (ret = avformat_write_header(oc, &opts)) < 0 ) {
     Warning("Unable to set movflags trying with defaults.");
-    ret = avformat_write_header(oc, NULL);
+    ret = avformat_write_header(oc, nullptr);
   } else if ( av_dict_count(opts) != 0 ) {
     Info("some options not used, turn on debugging for a list.");
-    AVDictionaryEntry *e = NULL;
-    while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != NULL ) {
+    AVDictionaryEntry *e = nullptr;
+    while ( (e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)) != nullptr ) {
       Debug(1, "Encoder Option %s=>%s", e->key, e->value);
       if ( !e->value ) {
-        av_dict_set(&opts, e->key, NULL, 0);
+        av_dict_set(&opts, e->key, nullptr, 0);
       }
     }
   }

@@ -468,7 +468,7 @@ VideoStore::~VideoStore() {
       // whatever we get. Failures are not fatal.
       AVPacket pkt;
       // Without these we seg fault I don't know why.
-      pkt.data = NULL;
+      pkt.data = nullptr;
       pkt.size = 0;
       av_init_packet(&pkt);

@@ -477,7 +477,7 @@ VideoStore::~VideoStore() {
        * At the end of the file, we pass the remaining samples to
        * the encoder. */
       while ( zm_resample_get_delay(resample_ctx, audio_out_ctx->sample_rate) ) {
-        zm_resample_audio(resample_ctx, NULL, out_frame);
+        zm_resample_audio(resample_ctx, nullptr, out_frame);

         if ( zm_add_samples_to_fifo(fifo, out_frame) ) {
           // Should probably set the frame size to what is reported FIXME

@@ -518,7 +518,7 @@ VideoStore::~VideoStore() {

 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
       // Put encoder into flushing mode
-      avcodec_send_frame(audio_out_ctx, NULL);
+      avcodec_send_frame(audio_out_ctx, nullptr);
 #endif

       while (1) {

@@ -537,7 +537,7 @@ VideoStore::~VideoStore() {

   // Flush Queues
   Debug(1, "Flushing interleaved queues");
-  av_interleaved_write_frame(oc, NULL);
+  av_interleaved_write_frame(oc, nullptr);

   Debug(1, "Writing trailer");
   /* Write the trailer before close */

@@ -552,7 +552,7 @@ VideoStore::~VideoStore() {
     /* Close the out file. */
     Debug(2, "Closing");
     if ( int rc = avio_close(oc->pb) ) {
-      oc->pb = NULL;
+      oc->pb = nullptr;
       Error("Error closing avio %s", av_err2str(rc));
     }
   } else {
@@ -570,24 +570,24 @@ VideoStore::~VideoStore() {
     // We allocate and copy in newer ffmpeg, so need to free it
     //avcodec_free_context(&video_in_ctx);
 #endif
-    video_in_ctx = NULL;
+    video_in_ctx = nullptr;

     if ( video_out_codec ) {
       avcodec_close(video_out_ctx);
       Debug(4, "Success closing video_out_ctx");
-      video_out_codec = NULL;
+      video_out_codec = nullptr;
     } // end if video_out_codec
 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)
     avcodec_free_context(&video_out_ctx);
 #endif
-    video_out_ctx = NULL;
+    video_out_ctx = nullptr;
   } // end if video_out_stream

   if ( audio_out_stream ) {
     if ( audio_in_codec ) {
       avcodec_close(audio_in_ctx);
       Debug(4, "Success closing audio_in_ctx");
-      audio_in_codec = NULL;
+      audio_in_codec = nullptr;
     } // end if audio_in_codec

 #if LIBAVCODEC_VERSION_CHECK(57, 64, 0, 64, 0)

@@ -595,7 +595,7 @@ VideoStore::~VideoStore() {
     avcodec_free_context(&audio_in_ctx);
 #endif
     Debug(4, "Success freeing audio_in_ctx");
-    audio_in_ctx = NULL;
+    audio_in_ctx = nullptr;

     if ( audio_out_ctx ) {
       avcodec_close(audio_out_ctx);

@@ -604,13 +604,13 @@ VideoStore::~VideoStore() {
       avcodec_free_context(&audio_out_ctx);
 #endif
     }
-    audio_out_ctx = NULL;
+    audio_out_ctx = nullptr;

 #if defined(HAVE_LIBAVRESAMPLE) || defined(HAVE_LIBSWRESAMPLE)
     if ( resample_ctx ) {
       if ( fifo ) {
         av_audio_fifo_free(fifo);
-        fifo = NULL;
+        fifo = nullptr;
       }
 #if defined(HAVE_LIBSWRESAMPLE)
       swr_free(&resample_ctx);

@@ -623,15 +623,15 @@ VideoStore::~VideoStore() {
     }
     if ( in_frame ) {
       av_frame_free(&in_frame);
-      in_frame = NULL;
+      in_frame = nullptr;
     }
     if ( out_frame ) {
       av_frame_free(&out_frame);
-      out_frame = NULL;
+      out_frame = nullptr;
     }
     if ( converted_in_samples ) {
       av_free(converted_in_samples);
-      converted_in_samples = NULL;
+      converted_in_samples = nullptr;
     }
 #endif
   } // end if audio_out_stream

@@ -690,7 +690,7 @@ bool VideoStore::setup_resampler() {
 #endif

   // if the codec is already open, nothing is done.
-  if ( (ret = avcodec_open2(audio_in_ctx, audio_in_codec, NULL)) < 0 ) {
+  if ( (ret = avcodec_open2(audio_in_ctx, audio_in_codec, nullptr)) < 0 ) {
     Error("Can't open audio in codec!");
     return false;
   }

@@ -747,7 +747,7 @@ bool VideoStore::setup_resampler() {

   audio_out_ctx->time_base = (AVRational){1, audio_out_ctx->sample_rate};

-  AVDictionary *opts = NULL;
+  AVDictionary *opts = nullptr;
   if ( (ret = av_dict_set(&opts, "strict", "experimental", 0)) < 0 ) {
     Error("Couldn't set experimental");
   }

@@ -756,9 +756,9 @@ bool VideoStore::setup_resampler() {
   if ( ret < 0 ) {
     Error("could not open codec (%d) (%s)",
           ret, av_make_error_string(ret).c_str());
-    audio_out_codec = NULL;
-    audio_out_ctx = NULL;
-    audio_out_stream = NULL;
+    audio_out_codec = nullptr;
+    audio_out_ctx = nullptr;
+    audio_out_stream = nullptr;
     return false;
   }
   zm_dump_codec(audio_out_ctx);

@@ -814,14 +814,14 @@ bool VideoStore::setup_resampler() {
     return false;
   }
 #if defined(HAVE_LIBSWRESAMPLE)
-  resample_ctx = swr_alloc_set_opts(NULL,
+  resample_ctx = swr_alloc_set_opts(nullptr,
       audio_out_ctx->channel_layout,
       audio_out_ctx->sample_fmt,
       audio_out_ctx->sample_rate,
       audio_in_ctx->channel_layout,
       audio_in_ctx->sample_fmt,
       audio_in_ctx->sample_rate,
-      0, NULL);
+      0, nullptr);
   if ( !resample_ctx ) {
     Error("Could not allocate resample context");
     av_frame_free(&in_frame);

@@ -885,7 +885,7 @@ bool VideoStore::setup_resampler() {
   // The codec gives us the frame size, in samples, we calculate the size of the
   // samples buffer in bytes
   unsigned int audioSampleBuffer_size = av_samples_get_buffer_size(
-      NULL, audio_out_ctx->channels,
+      nullptr, audio_out_ctx->channels,
       audio_out_ctx->frame_size,
       audio_out_ctx->sample_fmt, 0);
   converted_in_samples = reinterpret_cast<uint8_t *>(av_malloc(audioSampleBuffer_size));

@@ -1013,7 +1013,7 @@ int VideoStore::writeAudioFramePacket(AVPacket *ipkt) {
       if ( zm_resample_get_delay(resample_ctx, out_frame->sample_rate) < out_frame->nb_samples)
         break;
       // This will send a null frame, emptying out the resample buffer
-      input_frame = NULL;
+      input_frame = nullptr;
     } // end while there is data in the resampler

   } else {
@@ -83,7 +83,7 @@ void Zone::Setup(
   alarm_blobs = 0;
   min_blob_size = 0;
   max_blob_size = 0;
-  image = 0;
+  image = nullptr;
   score = 0;

   overload_count = 0;
src/zma.cpp (18 changed lines)
@@ -70,15 +70,15 @@ void Usage() {
 int main( int argc, char *argv[] ) {
   self = argv[0];

-  srand(getpid() * time(0));
+  srand(getpid() * time(nullptr));

   int id = -1;

   static struct option long_options[] = {
-    {"monitor", 1, 0, 'm'},
-    {"help", 0, 0, 'h'},
-    {"version", 0, 0, 'v'},
-    {0, 0, 0, 0}
+    {"monitor", 1, nullptr, 'm'},
+    {"help", 0, nullptr, 'h'},
+    {"version", 0, nullptr, 'v'},
+    {nullptr, 0, nullptr, 0}
   };

   while (1) {

@@ -146,15 +146,15 @@ int main( int argc, char *argv[] ) {
   unsigned int analysis_update_delay = monitor->GetAnalysisUpdateDelay();
   time_t last_analysis_update_time, cur_time;
   monitor->UpdateAdaptiveSkip();
-  last_analysis_update_time = time(0);
+  last_analysis_update_time = time(nullptr);

   while( (!zm_terminate) && monitor->ShmValid() ) {
     // Process the next image
-    sigprocmask(SIG_BLOCK, &block_set, 0);
+    sigprocmask(SIG_BLOCK, &block_set, nullptr);

     // Some periodic updates are required for variable capturing framerate
     if ( analysis_update_delay ) {
-      cur_time = time(0);
+      cur_time = time(nullptr);
       if ( (unsigned int)( cur_time - last_analysis_update_time ) > analysis_update_delay ) {
         analysis_rate = monitor->GetAnalysisRate();
         monitor->UpdateAdaptiveSkip();

@@ -174,7 +174,7 @@ int main( int argc, char *argv[] ) {
       logInit(log_id_string);
       zm_reload = false;
     }
-    sigprocmask(SIG_UNBLOCK, &block_set, 0);
+    sigprocmask(SIG_UNBLOCK, &block_set, nullptr);
   } // end while ! zm_terminate
   delete monitor;
 } else {
src/zmc.cpp (32 changed lines)
@@ -90,7 +90,7 @@ void Usage() {
 int main(int argc, char *argv[]) {
   self = argv[0];

-  srand(getpid() * time(0));
+  srand(getpid() * time(nullptr));

   const char *device = "";
   const char *protocol = "";

@@ -101,16 +101,16 @@ int main(int argc, char *argv[]) {
   int monitor_id = -1;

   static struct option long_options[] = {
-    {"device", 1, 0, 'd'},
-    {"protocol", 1, 0, 'r'},
-    {"host", 1, 0, 'H'},
-    {"port", 1, 0, 'P'},
-    {"path", 1, 0, 'p'},
-    {"file", 1, 0, 'f'},
-    {"monitor", 1, 0, 'm'},
-    {"help", 0, 0, 'h'},
-    {"version", 0, 0, 'v'},
-    {0, 0, 0, 0}
+    {"device", 1, nullptr, 'd'},
+    {"protocol", 1, nullptr, 'r'},
+    {"host", 1, nullptr, 'H'},
+    {"port", 1, nullptr, 'P'},
+    {"path", 1, nullptr, 'p'},
+    {"file", 1, nullptr, 'f'},
+    {"monitor", 1, nullptr, 'm'},
+    {"help", 0, nullptr, 'h'},
+    {"version", 0, nullptr, 'v'},
+    {nullptr, 0, nullptr, 0}
   };

   while (1) {

@@ -193,7 +193,7 @@ int main(int argc, char *argv[]) {

   hwcaps_detect();

-  Monitor **monitors = 0;
+  Monitor **monitors = nullptr;
   int n_monitors = 0;
 #if ZM_HAS_V4L
   if ( device[0] ) {

@@ -240,7 +240,7 @@ int main(int argc, char *argv[]) {
     result = 0;
     static char sql[ZM_SQL_SML_BUFSIZ];
     for ( int i = 0; i < n_monitors; i++ ) {
-      time_t now = (time_t)time(NULL);
+      time_t now = (time_t)time(nullptr);
       monitors[i]->setStartupTime(now);

       snprintf(sql, sizeof(sql),

@@ -287,7 +287,7 @@ int main(int argc, char *argv[]) {
     for ( int i = 0; i < n_monitors; i++ ) {
       long min_delay = MAXINT;

-      gettimeofday(&now, NULL);
+      gettimeofday(&now, nullptr);
       for ( int j = 0; j < n_monitors; j++ ) {
         if ( last_capture_times[j].tv_sec ) {
           DELTA_TIMEVAL(delta_time, now, last_capture_times[j], DT_PREC_3);

@@ -329,14 +329,14 @@ int main(int argc, char *argv[]) {
       }

       if ( next_delays[i] > 0 ) {
-        gettimeofday(&now, NULL);
+        gettimeofday(&now, nullptr);
         DELTA_TIMEVAL(delta_time, now, last_capture_times[i], DT_PREC_3);
         long sleep_time = next_delays[i]-delta_time.delta;
         if ( sleep_time > 0 ) {
           usleep(sleep_time*(DT_MAXGRAN/DT_PREC_3));
         }
       }
-      gettimeofday(&(last_capture_times[i]), NULL);
+      gettimeofday(&(last_capture_times[i]), nullptr);
     } // end if next_delay <= min_delay || next_delays[i] <= 0 )

   } // end foreach n_monitors
src/zms.cpp (20 changed lines)
@@ -53,7 +53,7 @@ bool ValidateAccess(User *user, int mon_id) {
 int main(int argc, const char *argv[]) {
   self = argv[0];

-  srand(getpid() * time(0));
+  srand(getpid() * time(nullptr));

   enum { ZMS_UNKNOWN, ZMS_MONITOR, ZMS_EVENT, ZMS_FIFO } source = ZMS_UNKNOWN;
   enum { ZMS_JPEG, ZMS_MPEG, ZMS_RAW, ZMS_ZIP, ZMS_SINGLE } mode = ZMS_JPEG;

@@ -101,12 +101,12 @@ int main(int argc, const char *argv[]) {
   int parm_no = 0;
   while ( (parm_no < 16) && (parms[parm_no] = strtok(q_ptr, "&")) ) {
     parm_no++;
-    q_ptr = NULL;
+    q_ptr = nullptr;
   }

   for ( int p = 0; p < parm_no; p++ ) {
     char *name = strtok(parms[p], "=");
-    char const *value = strtok(NULL, "=");
+    char const *value = strtok(nullptr, "=");
     if ( !value )
       value = "";
     if ( !strcmp(name, "source") ) {

@@ -127,10 +127,10 @@ int main(int argc, const char *argv[]) {
     } else if ( !strcmp(name, "time") ) {
       event_time = atoi(value);
     } else if ( !strcmp(name, "event") ) {
-      event_id = strtoull(value, NULL, 10);
+      event_id = strtoull(value, nullptr, 10);
       source = ZMS_EVENT;
     } else if ( !strcmp(name, "frame") ) {
-      frame_id = strtoull(value, NULL, 10);
+      frame_id = strtoull(value, nullptr, 10);
       source = ZMS_EVENT;
     } else if ( !strcmp(name, "scale") ) {
       scale = atoi(value);

@@ -184,7 +184,7 @@ int main(int argc, const char *argv[]) {
   logInit(log_id_string);

   if ( config.opt_use_auth ) {
-    User *user = NULL;
+    User *user = nullptr;

     if ( jwt_token_str != "" ) {
       // user = zmLoadTokenUser(jwt_token_str, config.auth_hash_ips);

@@ -211,27 +211,27 @@ int main(int argc, const char *argv[]) {
     }
     if ( !ValidateAccess(user, monitor_id) ) {
       delete user;
-      user = NULL;
+      user = nullptr;
       fputs("HTTP/1.0 403 Forbidden\r\n\r\n", stdout);
       logTerm();
       zmDbClose();
       return 0;
     }
     delete user;
-    user = NULL;
+    user = nullptr;
   } // end if config.opt_use_auth

   hwcaps_detect();
   zmSetDefaultTermHandler();
   zmSetDefaultDieHandler();

-  setbuf(stdout, 0);
+  setbuf(stdout, nullptr);
   if ( nph ) {
     fputs("HTTP/1.0 200 OK\r\n", stdout);
   }
   fprintf(stdout, "Server: ZoneMinder Video Server/%s\r\n", ZM_VERSION);

-  time_t now = time(0);
+  time_t now = time(nullptr);
   char date_string[64];
   strftime(date_string, sizeof(date_string)-1,
       "%a, %d %b %Y %H:%M:%S GMT", gmtime(&now));
src/zmu.cpp (82 changed lines)
@@ -211,45 +211,45 @@ int main(int argc, char *argv[]) {

   self = argv[0];

-  srand(getpid() * time(0));
+  srand(getpid() * time(nullptr));

   static struct option long_options[] = {
-    {"device", 2, 0, 'd'},
-    {"monitor", 1, 0, 'm'},
-    {"verbose", 0, 0, 'v'},
-    {"image", 2, 0, 'i'},
-    {"scale", 1, 0, 'S'},
-    {"timestamp", 2, 0, 't'},
-    {"state", 0, 0, 's'},
-    {"brightness", 2, 0, 'B'},
-    {"contrast", 2, 0, 'C'},
-    {"hue", 2, 0, 'H'},
-    {"contrast", 2, 0, 'O'},
-    {"read_index", 0, 0, 'R'},
-    {"write_index", 0, 0, 'W'},
-    {"event", 0, 0, 'e'},
-    {"fps", 0, 0, 'f'},
-    {"zones", 2, 0, 'z'},
-    {"alarm", 0, 0, 'a'},
-    {"noalarm", 0, 0, 'n'},
-    {"cancel", 0, 0, 'c'},
-    {"reload", 0, 0, 'L'},
-    {"enable", 0, 0, 'E'},
-    {"disable", 0, 0, 'D'},
-    {"suspend", 0, 0, 'u'},
-    {"resume", 0, 0, 'r'},
-    {"query", 0, 0, 'q'},
-    {"username", 1, 0, 'U'},
-    {"password", 1, 0, 'P'},
-    {"auth", 1, 0, 'A'},
-    {"token", 1, 0, 'T'},
-    {"version", 1, 0, 'V'},
-    {"help", 0, 0, 'h'},
-    {"list", 0, 0, 'l'},
-    {0, 0, 0, 0}
+    {"device", 2, nullptr, 'd'},
+    {"monitor", 1, nullptr, 'm'},
+    {"verbose", 0, nullptr, 'v'},
+    {"image", 2, nullptr, 'i'},
+    {"scale", 1, nullptr, 'S'},
+    {"timestamp", 2, nullptr, 't'},
+    {"state", 0, nullptr, 's'},
+    {"brightness", 2, nullptr, 'B'},
+    {"contrast", 2, nullptr, 'C'},
+    {"hue", 2, nullptr, 'H'},
+    {"contrast", 2, nullptr, 'O'},
+    {"read_index", 0, nullptr, 'R'},
+    {"write_index", 0, nullptr, 'W'},
+    {"event", 0, nullptr, 'e'},
+    {"fps", 0, nullptr, 'f'},
+    {"zones", 2, nullptr, 'z'},
+    {"alarm", 0, nullptr, 'a'},
+    {"noalarm", 0, nullptr, 'n'},
+    {"cancel", 0, nullptr, 'c'},
+    {"reload", 0, nullptr, 'L'},
+    {"enable", 0, nullptr, 'E'},
+    {"disable", 0, nullptr, 'D'},
+    {"suspend", 0, nullptr, 'u'},
+    {"resume", 0, nullptr, 'r'},
+    {"query", 0, nullptr, 'q'},
+    {"username", 1, nullptr, 'U'},
+    {"password", 1, nullptr, 'P'},
+    {"auth", 1, nullptr, 'A'},
+    {"token", 1, nullptr, 'T'},
+    {"version", 1, nullptr, 'V'},
+    {"help", 0, nullptr, 'h'},
+    {"list", 0, nullptr, 'l'},
+    {nullptr, 0, nullptr, 0}
   };

-  const char *device = 0;
+  const char *device = nullptr;
   int mon_id = 0;
   bool verbose = false;
   int function = ZMU_BOGUS;

@@ -260,10 +260,10 @@ int main(int argc, char *argv[]) {
   int contrast = -1;
   int hue = -1;
   int colour = -1;
-  char *zoneString = 0;
-  char *username = 0;
-  char *password = 0;
-  char *auth = 0;
+  char *zoneString = nullptr;
+  char *username = nullptr;
+  char *password = nullptr;
+  char *auth = nullptr;
   std::string jwt_token_str = "";
 #if ZM_HAS_V4L
 #if ZM_HAS_V4L2

@@ -487,7 +487,7 @@ int main(int argc, char *argv[]) {
     if ( !monitor->connect() ) {
       Error("Can't connect to capture daemon: %d %s", monitor->Id(), monitor->Name());
       delete monitor;
-      monitor = NULL;
+      monitor = nullptr;
       exit_zmu(-1);
     }

@@ -701,7 +701,7 @@ int main(int argc, char *argv[]) {
       Usage();
     }
     delete monitor;
-    monitor = NULL;
+    monitor = nullptr;
   } else { // non monitor functions
     if ( function & ZMU_QUERY ) {
 #if ZM_HAS_V4L
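These option tables are consumed by getopt_long(), which scans until it reaches the all-zero sentinel entry; writing that sentinel as {nullptr, 0, nullptr, 0} makes the two pointer members explicit. A minimal standalone sketch with made-up options (not the real zmu/zmc/zma flag sets):

#include <getopt.h>
#include <cstdio>

int main(int argc, char *argv[]) {
  // The final {nullptr, 0, nullptr, 0} entry terminates the table.
  static struct option long_options[] = {
    {"monitor", required_argument, nullptr, 'm'},
    {"help",    no_argument,       nullptr, 'h'},
    {nullptr, 0, nullptr, 0}
  };
  int c;
  while ( (c = getopt_long(argc, argv, "m:h", long_options, nullptr)) != -1 ) {
    if ( c == 'm' )
      std::printf("monitor id: %s\n", optarg);
    else if ( c == 'h' )
      std::printf("usage: example [-m id] [-h]\n");
  }
  return 0;
}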