1) RTSP support for direct capture and target colorspace.
2) Hopefully fixed the mmap unexpected shared memory size error completely this time.
3) Added 32bit RGB and grayscale support to the signal checking.
4) Fixed zms/nph-zms continuing to run after changing capture options, eating CPU until killed with -9.
5) Default monitor options changed again.
6) Fixed unrelated bug: undefined constant ZM_V4L2 in the monitor probe.
7) Fixed unrelated bug: error in offset X in the monitor probe.
8) Changed the monitor probe's preferred capture settings.
9) Other small changes.
parent b0bc227232
commit 642a1bfdb2
@@ -38,6 +38,7 @@ FfmpegCamera::FfmpegCamera( int p_id, const std::string &p_path, int p_width, in
     mCodec = NULL;
     mRawFrame = NULL;
     mFrame = NULL;
+    frameCount = 0;

 #if HAVE_LIBSWSCALE
     mConvertContext = NULL;

@@ -183,8 +184,7 @@ int FfmpegCamera::PreCapture()

 int FfmpegCamera::Capture( Image &image )
 {
-    static int frameCount = 0;
     AVPacket packet;
     uint8_t* directbuffer;

     /* Request a writeable buffer of the target image */

@@ -35,6 +35,8 @@ class FfmpegCamera : public Camera
 protected:
     std::string mPath;

+    int frameCount;
+
 #if HAVE_LIBAVFORMAT
     AVFormatContext *mFormatContext;
     int mVideoStreamId;

@@ -354,7 +354,7 @@ Monitor::Monitor(
     struct stat map_stat;
     if ( fstat( map_fd, &map_stat ) < 0 )
         Fatal( "Can't stat memory map file %s: %s", mem_file, strerror(errno) );
-    if ( map_stat.st_size == 0 )
+    if ( map_stat.st_size != mem_size && purpose == CAPTURE )
     {
         // Allocate the size
         if ( ftruncate( map_fd, mem_size ) < 0 )

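
This hunk is the core of the mmap shared-memory-size fix from the commit message: instead of only sizing a brand-new (zero-length) map file, the capture process now re-sizes the file whenever its length no longer matches the layout the current settings require. A minimal sketch of the same idea using plain POSIX calls; map_path and expected_size are illustrative stand-ins, not ZoneMinder's members:

    #include <fcntl.h>
    #include <sys/mman.h>
    #include <sys/stat.h>
    #include <unistd.h>
    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>

    // Open (or create) the map file and force it to the size the current
    // capture settings require, so readers never map a stale, smaller file.
    void *open_shared_map(const char *map_path, size_t expected_size)
    {
        int fd = open(map_path, O_RDWR | O_CREAT, 0600);
        if (fd < 0) { perror("open"); exit(1); }

        struct stat st;
        if (fstat(fd, &st) < 0) { perror("fstat"); exit(1); }

        // Re-size whenever the on-disk length disagrees with the expected
        // layout, not only when the file is empty.
        if ((size_t)st.st_size != expected_size)
            if (ftruncate(fd, expected_size) < 0) { perror("ftruncate"); exit(1); }

        void *mem = mmap(NULL, expected_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
        if (mem == MAP_FAILED) { perror("mmap"); exit(1); }
        close(fd); // the mapping remains valid after the descriptor is closed
        return mem;
    }
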
@@ -1007,23 +1007,25 @@ bool Monitor::CheckSignal( const Image *image )
 {
     static bool static_undef = true;
-    /* RGB24 colors */
-    static unsigned char red_val;
-    static unsigned char green_val;
-    static unsigned char blue_val;
-
-    static Rgb color_val; /* RGB32 color */
-    static uint8_t grayscale_val; /* 8bit grayscale color */
+    static uint8_t red_val;
+    static uint8_t green_val;
+    static uint8_t blue_val;
+    static uint8_t grayscale_val; /* 8bit grayscale color */
+    static Rgb colour_val; /* RGB32 color */
+    static int usedsubpixorder;

     if ( config.signal_check_points > 0 )
     {
         if ( static_undef )
         {
             static_undef = false;
-            red_val = RED_VAL_RGBA(signal_check_colour);
-            green_val = GREEN_VAL_RGBA(signal_check_colour);
-            blue_val = BLUE_VAL_RGBA(signal_check_colour);
-            color_val = RGBA_BGRA_ZEROALPHA(signal_check_colour); /* Clear alpha byte */
-            grayscale_val = 0xff & signal_check_colour; /* Clear all bytes but lowest byte */
+            usedsubpixorder = camera->SubpixelOrder();
+            colour_val = rgb_convert(signal_check_colour, ZM_SUBPIX_ORDER_BGR); /* HTML colour code is actually BGR in memory, we want RGB */
+            colour_val = rgb_convert(colour_val, usedsubpixorder);
+            red_val = RED_VAL_BGRA(signal_check_colour);
+            green_val = GREEN_VAL_BGRA(signal_check_colour);
+            blue_val = BLUE_VAL_BGRA(signal_check_colour);
+            grayscale_val = signal_check_colour & 0xff; /* Clear all bytes but lowest byte */
         }

         const uint8_t *buffer = image->Buffer();

@@ -1044,20 +1046,33 @@ bool Monitor::CheckSignal( const Image *image )
                 break;
             }

-            if(colours == 1) {
-                if ( *(buffer+index) != grayscale_val )
-                    return true;
-
-            } else if(colours == 3) {
-                const uint8_t *ptr = buffer+(index*colours);
-                if ( (RED_PTR_RGBA(ptr) != red_val) || (GREEN_PTR_RGBA(ptr) != green_val) || (BLUE_PTR_RGBA(ptr) != blue_val) )
-                    return true;
-
-            } else if(colours == 4) {
-                if ( RGBA_BGRA_ZEROALPHA(*(((const Rgb*)buffer)+index)) != color_val )
-                    return true;
-            }
-
+            if(colours == ZM_COLOUR_GRAY8) {
+                if ( *(buffer+index) != grayscale_val )
+                    return true;
+
+            } else if(colours == ZM_COLOUR_RGB24) {
+                const uint8_t *ptr = buffer+(index*colours);
+
+                if ( usedsubpixorder == ZM_SUBPIX_ORDER_BGR) {
+                    if ( (RED_PTR_BGRA(ptr) != red_val) || (GREEN_PTR_BGRA(ptr) != green_val) || (BLUE_PTR_BGRA(ptr) != blue_val) )
+                        return true;
+                } else {
+                    /* Assume RGB */
+                    if ( (RED_PTR_RGBA(ptr) != red_val) || (GREEN_PTR_RGBA(ptr) != green_val) || (BLUE_PTR_RGBA(ptr) != blue_val) )
+                        return true;
+                }
+
+            } else if(colours == ZM_COLOUR_RGB32) {
+                if ( usedsubpixorder == ZM_SUBPIX_ORDER_ARGB || usedsubpixorder == ZM_SUBPIX_ORDER_ABGR) {
+                    if ( ARGB_ABGR_ZEROALPHA(*(((const Rgb*)buffer)+index)) != ARGB_ABGR_ZEROALPHA(colour_val) )
+                        return true;
+                } else {
+                    /* Assume RGBA or BGRA */
+                    if ( RGBA_BGRA_ZEROALPHA(*(((const Rgb*)buffer)+index)) != RGBA_BGRA_ZEROALPHA(colour_val) )
+                        return true;
+                }
+            }
+
         }
         return( false );
     }

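
For the new colourspaces, the check now compares each sample point against a reference colour that has been converted to the camera's own layout up front. The sketch below shows that comparison strategy in isolation; the ColourSpace enum, CheckColour struct and has_signal name are illustrative, not ZoneMinder's types, and the RGBA32 case assumes the alpha byte occupies the top 8 bits of the packed value:

    #include <cstdint>
    #include <cstddef>
    #include <cstring>

    enum class ColourSpace { Gray8, RGB24, RGBA32 };

    // Reference colour, pre-converted once into the capture colourspace.
    struct CheckColour {
        uint8_t r, g, b;   // used for RGB24
        uint32_t rgba;     // used for RGBA32, alpha already cleared
        uint8_t gray;      // used for Gray8
    };

    // Returns true as soon as one sampled pixel differs from the check colour,
    // i.e. the image is not a solid "no signal" frame.
    bool has_signal(const uint8_t *buffer, ColourSpace cs,
                    const size_t *samples, size_t nsamples, const CheckColour &c)
    {
        for (size_t i = 0; i < nsamples; i++) {
            const size_t index = samples[i];
            switch (cs) {
            case ColourSpace::Gray8:
                if (buffer[index] != c.gray) return true;
                break;
            case ColourSpace::RGB24: {
                const uint8_t *p = buffer + index * 3;
                if (p[0] != c.r || p[1] != c.g || p[2] != c.b) return true;
                break;
            }
            case ColourSpace::RGBA32: {
                uint32_t px;
                std::memcpy(&px, buffer + index * 4, 4);
                if ((px & 0x00ffffffu) != (c.rgba & 0x00ffffffu)) return true; // ignore alpha
                break;
            }
            }
        }
        return false; // every sampled pixel matched: signal considered lost
    }
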
@@ -3498,7 +3513,7 @@ void MonitorStream::runStream()
     while ( !zm_terminate )
     {
         bool got_command = false;
-        if ( feof( stdout ) || ferror( stdout ) )
+        if ( feof( stdout ) || ferror( stdout ) || !monitor->ShmValid() )
        {
            break;
        }

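
This one-condition change is the nph-zms fix: the streaming loop now also exits when the monitor's shared memory is no longer valid (for example after zmc rebuilt it with different capture settings) rather than spinning on a stale mapping. A rough sketch of the loop shape; shm_valid() and stream_frame() are hypothetical stand-ins for ZoneMinder's own checks:

    #include <cstdio>
    #include <csignal>

    static volatile std::sig_atomic_t terminate_flag = 0;

    static bool shm_valid()    { return true; } // stand-in: re-stat the map file and compare sizes
    static bool stream_frame() { return true; } // stand-in: write one frame to stdout

    void run_stream()
    {
        while (!terminate_flag)
        {
            // Stop when the client is gone OR the capture side replaced the
            // shared memory; otherwise the streamer keeps running and eats CPU.
            if (feof(stdout) || ferror(stdout) || !shm_valid())
                break;

            if (!stream_frame())
                break;
        }
    }
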
@@ -47,10 +47,53 @@ RemoteCameraRtsp::RemoteCameraRtsp( int p_id, const std::string &p_method, const
     {
         Initialise();
     }
+
+    mFormatContext = NULL;
+    mVideoStreamId = -1;
+    mCodecContext = NULL;
+    mCodec = NULL;
+    mRawFrame = NULL;
+    mFrame = NULL;
+    frameCount = 0;
+
+#if HAVE_LIBSWSCALE
+    mConvertContext = NULL;
+#endif
+    /* Has to be located inside the constructor so other components such as zma will receive correct colours and subpixel order */
+    if(colours == ZM_COLOUR_RGB32) {
+        subpixelorder = ZM_SUBPIX_ORDER_RGBA;
+        imagePixFormat = PIX_FMT_RGBA;
+    } else if(colours == ZM_COLOUR_RGB24) {
+        subpixelorder = ZM_SUBPIX_ORDER_RGB;
+        imagePixFormat = PIX_FMT_RGB24;
+    } else if(colours == ZM_COLOUR_GRAY8) {
+        subpixelorder = ZM_SUBPIX_ORDER_NONE;
+        imagePixFormat = PIX_FMT_GRAY8;
+    } else {
+        Panic("Unexpected colours: %d",colours);
+    }
+
 }

 RemoteCameraRtsp::~RemoteCameraRtsp()
 {
+    av_freep( &mFrame );
+    av_freep( &mRawFrame );
+
+#if HAVE_LIBSWSCALE
+    if ( mConvertContext )
+    {
+        sws_freeContext( mConvertContext );
+        mConvertContext = NULL;
+    }
+#endif
+
+    if ( mCodecContext )
+    {
+        avcodec_close( mCodecContext );
+        mCodecContext = NULL; // Freed by av_close_input_file
+    }
+
     if ( capture )
     {
         Terminate();

@@ -72,17 +115,11 @@ void RemoteCameraRtsp::Initialise()

     av_register_all();

-    frameCount = 0;
-
     Connect();
 }

 void RemoteCameraRtsp::Terminate()
 {
-    avcodec_close( codecContext );
-    av_free( codecContext );
-    av_free( picture );
-
     Disconnect();
 }

@@ -119,36 +156,67 @@ int RemoteCameraRtsp::PrimeCapture()

     Debug( 2, "Got sources" );

-    formatContext = rtspThread->getFormatContext();
+    mFormatContext = rtspThread->getFormatContext();

-    // Find the first video stream
-    int videoStream=-1;
-    for ( int i = 0; i < formatContext->nb_streams; i++ )
+    // Find first video stream present
+    mVideoStreamId = -1;
+
+    for ( int i = 0; i < mFormatContext->nb_streams; i++ )
 #if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(51,2,1)
-        if ( formatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO )
+        if ( mFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO )
 #else
-        if ( formatContext->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO )
+        if ( mFormatContext->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO )
 #endif
         {
-            videoStream = i;
+            mVideoStreamId = i;
             break;
         }
-    if ( videoStream == -1 )
+    if ( mVideoStreamId == -1 )
         Fatal( "Unable to locate video stream" );

     // Get a pointer to the codec context for the video stream
-    codecContext = formatContext->streams[videoStream]->codec;
+    mCodecContext = mFormatContext->streams[mVideoStreamId]->codec;

     // Find the decoder for the video stream
-    codec = avcodec_find_decoder( codecContext->codec_id );
-    if ( codec == NULL )
-        Panic( "Unable to locate codec %d decoder", codecContext->codec_id );
+    mCodec = avcodec_find_decoder( mCodecContext->codec_id );
+    if ( mCodec == NULL )
+        Panic( "Unable to locate codec %d decoder", mCodecContext->codec_id );

     // Open codec
-    if ( avcodec_open( codecContext, codec ) < 0 )
+    if ( avcodec_open( mCodecContext, mCodec ) < 0 )
         Panic( "Can't open codec" );

-    picture = avcodec_alloc_frame();
+    // Allocate space for the native video frame
+    mRawFrame = avcodec_alloc_frame();
+
+    // Allocate space for the converted video frame
+    mFrame = avcodec_alloc_frame();
+
+    if(mRawFrame == NULL || mFrame == NULL)
+        Fatal( "Unable to allocate frame(s)");
+
+    int pSize = avpicture_get_size( imagePixFormat, width, height );
+    if( pSize != imagesize) {
+        Fatal("Image size mismatch. Required: %d Available: %d",pSize,imagesize);
+    }
+
+#if HAVE_LIBSWSCALE
+    if(!sws_isSupportedOutput(imagePixFormat)) {
+        Fatal("swscale does not support the target format");
+    }
+
+    if(config.cpu_extensions && sseversion >= 20) {
+        mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC | SWS_CPU_CAPS_SSE2, NULL, NULL, NULL );
+    } else {
+        mConvertContext = sws_getContext( mCodecContext->width, mCodecContext->height, mCodecContext->pix_fmt, width, height, imagePixFormat, SWS_BICUBIC, NULL, NULL, NULL );
+    }
+
+    if(mConvertContext == NULL)
+        Fatal( "Unable to create conversion context");
+#else // HAVE_LIBSWSCALE
+    Fatal( "You must compile ffmpeg with the --enable-swscale option to use RTSP cameras" );
+#endif // HAVE_LIBSWSCALE
+

     return( 0 );
 }

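
The new PrimeCapture body above does the "target colorspace" work announced in the commit message: it validates that the requested output format is supported, then builds one reusable libswscale context from the codec's native size and pixel format to the monitor's configured size and format. A minimal standalone sketch of that setup step against the legacy FFmpeg API used here (PIX_FMT_*, enum PixelFormat); make_convert_context and its parameters are illustrative, and error handling is simplified:

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libswscale/swscale.h>
    }
    #include <cstdio>
    #include <cstdlib>

    // Build a conversion context once, at prime time, instead of per frame.
    // dec is the opened decoder context; out_w/out_h/out_fmt describe the
    // monitor's target image (e.g. PIX_FMT_RGBA for 32-bit colour).
    static SwsContext *make_convert_context(AVCodecContext *dec,
                                             int out_w, int out_h,
                                             PixelFormat out_fmt)
    {
        if (!sws_isSupportedOutput(out_fmt)) {
            fprintf(stderr, "swscale cannot produce the requested format\n");
            exit(1);
        }
        SwsContext *ctx = sws_getContext(dec->width, dec->height, dec->pix_fmt,
                                         out_w, out_h, out_fmt,
                                         SWS_BICUBIC, NULL, NULL, NULL);
        if (!ctx) {
            fprintf(stderr, "unable to create conversion context\n");
            exit(1);
        }
        return ctx;
    }
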
@@ -167,101 +235,71 @@ int RemoteCameraRtsp::PreCapture()

 int RemoteCameraRtsp::Capture( Image &image )
 {
+    AVPacket packet;
+    uint8_t* directbuffer;
+
+    /* Request a writeable buffer of the target image */
+    directbuffer = image.WriteBuffer(width, height, colours, subpixelorder);
+    if(directbuffer == NULL) {
+        Error("Failed requesting writeable buffer for the captured image.");
+        return (-1);
+    }
+
     while ( true )
     {
         buffer.clear();
         if ( !rtspThread->isRunning() )
-            break;
-        //if ( rtspThread->stopped() )
-            //break;
+            return (-1);

         if ( rtspThread->getFrame( buffer ) )
         {
             Debug( 3, "Read frame %d bytes", buffer.size() );
             Debug( 4, "Address %p", buffer.head() );
             Hexdump( 4, buffer.head(), 16 );

-            static AVFrame *tmp_picture = NULL;
-
-            if ( !tmp_picture )
-            {
-                //if ( c->pix_fmt != pf )
-                //{
-                    tmp_picture = avcodec_alloc_frame();
-                    if ( !tmp_picture )
-                    {
-                        Panic( "Could not allocate temporary opicture" );
-                    }
-                    int size = avpicture_get_size( PIX_FMT_RGB24, width, height);
-                    uint8_t *tmp_picture_buf = (uint8_t *)malloc(size);
-                    if (!tmp_picture_buf)
-                    {
-                        av_free( tmp_picture );
-                        Panic( "Could not allocate temporary opicture" );
-                    }
-                    avpicture_fill( (AVPicture *)tmp_picture, tmp_picture_buf, PIX_FMT_RGB24, width, height );
-                //}
-            }
-
             if ( !buffer.size() )
                 return( -1 );

-            AVPacket packet;
             av_init_packet( &packet );
-            int initialFrameCount = frameCount;
-            while ( buffer.size() > 0 )
+            int frameComplete = false;
+
+            while ( !frameComplete )
             {
-                int got_picture = false;
                 packet.data = buffer.head();
                 packet.size = buffer.size();
-                int len = avcodec_decode_video2( codecContext, picture, &got_picture, &packet );
+                int len = avcodec_decode_video2( mCodecContext, mRawFrame, &frameComplete, &packet );
                 if ( len < 0 )
                 {
-                    if ( frameCount > initialFrameCount )
-                    {
-                        // Decoded at least one frame
-                        return( 0 );
-                    }
                     Error( "Error while decoding frame %d", frameCount );
                     Hexdump( Logger::ERROR, buffer.head(), buffer.size()>256?256:buffer.size() );
                     buffer.clear();
                     continue;
                     //return( -1 );
                 }
                 Debug( 2, "Frame: %d - %d/%d", frameCount, len, buffer.size() );
                 //if ( buffer.size() < 400 )
                     //Hexdump( 0, buffer.head(), buffer.size() );

-                if ( got_picture )
+                if ( frameComplete )
                 {
                     /* the picture is allocated by the decoder. no need to free it */
-                    Debug( 1, "Got picture %d", frameCount );
+                    Debug( 3, "Got frame %d", frameCount );
+
+                    avpicture_fill( (AVPicture *)mFrame, directbuffer, imagePixFormat, width, height);

 #if HAVE_LIBSWSCALE
-                    static struct SwsContext *img_convert_ctx = 0;
-
-                    if ( !img_convert_ctx )
-                    {
-                        img_convert_ctx = sws_getCachedContext( NULL, codecContext->width, codecContext->height, codecContext->pix_fmt, width, height, PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL );
-                        if ( !img_convert_ctx )
-                            Panic( "Unable to initialise image scaling context" );
-                    }
-
-                    sws_scale( img_convert_ctx, picture->data, picture->linesize, 0, height, tmp_picture->data, tmp_picture->linesize );
-#endif
-                    image.Assign( width, height, colours, subpixelorder, tmp_picture->data[0], imagesize);
-
-                    frameCount++;
-
-                    return( 0 );
-                }
-                else
-                {
-                    Warning( "Unable to get picture from frame" );
+                    if ( sws_scale( mConvertContext, mRawFrame->data, mRawFrame->linesize, 0, mCodecContext->height, mFrame->data, mFrame->linesize ) < 0 )
+                        Fatal( "Unable to convert raw format %u to target format %u at frame %d", mCodecContext->pix_fmt, imagePixFormat, frameCount );
+#else // HAVE_LIBSWSCALE
+                    Fatal( "You must compile ffmpeg with the --enable-swscale option to use RTSP cameras" );
+#endif // HAVE_LIBSWSCALE
+
+                    frameCount++;
                 }
                 buffer -= len;
             }
+            av_free_packet( &packet );
         }
     }
-    return( -1 );
+    return (0) ;
 }

 int RemoteCameraRtsp::PostCapture()

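
The rewritten Capture() decodes into mRawFrame and then converts directly into the writeable buffer of the target Image, rather than going through a temporary RGB24 picture. Below is a compact sketch of that decode-and-convert step for a single encoded buffer, written against the same legacy API (avcodec_decode_video2, avpicture_fill); decode_into and its parameters are illustrative, not ZoneMinder functions:

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libswscale/swscale.h>
    }
    #include <cstdint>

    // Feed one encoded buffer to the decoder and, when a full frame comes out,
    // scale/convert it directly into dest (the writeable image buffer).
    // Returns true when a frame was written into dest.
    static bool decode_into(AVCodecContext *dec, SwsContext *convert,
                            AVFrame *raw, AVFrame *out,
                            uint8_t *data, int size,
                            uint8_t *dest, PixelFormat dest_fmt,
                            int dest_w, int dest_h)
    {
        AVPacket packet;
        av_init_packet(&packet);
        int frameComplete = 0;

        while (!frameComplete && size > 0) {
            packet.data = data;
            packet.size = size;
            int len = avcodec_decode_video2(dec, raw, &frameComplete, &packet);
            if (len < 0)
                return false;          // decode error: give up on this buffer
            data += len;
            size -= len;
        }
        if (!frameComplete)
            return false;

        // Point the output frame at the caller's buffer, then convert into it.
        avpicture_fill((AVPicture *)out, dest, dest_fmt, dest_w, dest_h);
        sws_scale(convert, raw->data, raw->linesize, 0, dec->height,
                  out->data, out->linesize);
        return true;
    }
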
@@ -46,11 +46,21 @@ protected:

     RtspThread *rtspThread;

-    AVFormatContext *formatContext;
-    AVCodec *codec;
-    AVCodecContext *codecContext;
-    AVFrame *picture;
     int frameCount;

+#if HAVE_LIBAVFORMAT
+    AVFormatContext *mFormatContext;
+    int mVideoStreamId;
+    AVCodecContext *mCodecContext;
+    AVCodec *mCodec;
+    AVFrame *mRawFrame;
+    AVFrame *mFrame;
+    PixelFormat imagePixFormat;
+#endif // HAVE_LIBAVFORMAT
+
+#if HAVE_LIBSWSCALE
+    struct SwsContext *mConvertContext;
+#endif
+
 public:
     RemoteCameraRtsp( int p_id, const std::string &method, const std::string &host, const std::string &port, const std::string &path, int p_width, int p_height, int p_colours, int p_brightness, int p_contrast, int p_hue, int p_colour, bool p_capture );

@@ -664,11 +664,11 @@ bool Zone::CheckAlarms( const Image *delta_image )

     if ( type == INCLUSIVE )
     {
-        score <<= 1;
+        score /= 2;
     }
     else if ( type == EXCLUSIVE )
     {
-        score >>= 1;
+        score *= 2;
     }

     Debug( 5, "Adjusted score is %d", score );

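
Among the "small other minor changes": the bit-shift adjustments on the removed lines scale the score in the opposite direction from the arithmetic that replaces them, since score <<= 1 doubles a value while score >>= 1 halves it. A tiny check makes the difference concrete:

    #include <cassert>

    int main()
    {
        int score = 50;
        assert((score << 1) == 100); // removed INCLUSIVE adjustment: doubled the score
        assert((score / 2)  == 25);  // new INCLUSIVE adjustment: halves it
        assert((score >> 1) == 25);  // removed EXCLUSIVE adjustment: halved the score
        assert((score * 2)  == 100); // new EXCLUSIVE adjustment: doubles it
        return 0;
    }
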
@@ -72,7 +72,7 @@ else
         'LabelFormat' => '%N - %d/%m/%y %H:%M:%S',
         'LabelX' => 0,
         'LabelY' => 0,
-        'ImageBufferCount' => 60,
+        'ImageBufferCount' => 50,
         'WarmupCount' => 25,
         'PreEventCount' => 25,
         'PostEventCount' => 25,

@@ -91,14 +91,14 @@ else
         'SectionLength' => 600,
         'FrameSkip' => 0,
         'EventPrefix' => 'Event-',
-        'MaxFPS' => "5",
-        'AlarmMaxFPS' => "5",
+        'MaxFPS' => "",
+        'AlarmMaxFPS' => "",
         'FPSReportInterval' => 1000,
         'RefBlendPerc' => 7,
         'DefaultView' => 'Events',
         'DefaultRate' => '100',
         'DefaultScale' => '100',
-        'SignalCheckColour' => '#0000C0',
+        'SignalCheckColour' => '#0000c0',
         'WebColour' => 'red',
         'Triggers' => "",
     );

@@ -139,7 +139,7 @@ if ( $newMonitor['AlarmMaxFPS'] == '0.00' )

 if ( !empty($_REQUEST['preset']) )
 {
-    $preset = dbFetchOne( "select Type, Device, Channel, Format, Protocol, Method, Host, Port, Path, Width, Height, Colours, Palette, MaxFPS, Controllable, ControlId, ControlDevice, ControlAddress, DefaultRate, DefaultScale from MonitorPresets where Id = '".dbEscape($_REQUEST['preset'])."'" );
+    $preset = dbFetchOne( "select Type, Device, Channel, Format, Protocol, Method, Host, Port, Path, Width, Height, Palette, MaxFPS, Controllable, ControlId, ControlDevice, ControlAddress, DefaultRate, DefaultScale from MonitorPresets where Id = '".dbEscape($_REQUEST['preset'])."'" );
     foreach ( $preset as $name=>$value )
     {
         if ( isset($value) )

@@ -150,7 +150,7 @@ if ( !empty($_REQUEST['preset']) )
 }
 if ( !empty($_REQUEST['probe']) )
 {
-    $probe = unserialize( $_REQUEST['probe'] );
+    $probe = unserialize(base64_decode($_REQUEST['probe']));
     foreach ( $probe as $name=>$value )
     {
         if ( isset($value) )

@@ -27,7 +27,7 @@ if ( !canEdit( 'Monitors' ) )
 $cameras = array();
 $cameras[0] = $SLANG['ChooseDetectedCamera'];

-if ( ZM_V4L2 )
+if ( ZM_HAS_V4L2 )
 {
     // Probe Local Cameras
     //

@@ -45,7 +45,7 @@ if ( ZM_V4L2 )

     $devices = array();
     $preferredStandards = array( 'PAL', 'NTSC' );
-    $preferredFormats = array( '422P', 'YUYV', 'BGR3' );
+    $preferredFormats = array( 'BGR4', 'RGB4', 'BGR3', 'RGB3', 'YUYV', '422P', 'GREY');
     foreach ( $output as $line )
     {
         if ( !preg_match( '/^d:([^|]+).*S:([^|]*).*F:([^|]+).*I:(\d+)\|(.+)$/', $line, $deviceMatches ) )

@@ -83,6 +83,7 @@ if ( ZM_V4L2 )
                 'Type' => 'Local',
                 'Device' => $deviceMatches[1],
                 'Channel' => $i,
+                'Colours' => 4,
                 'Format' => $preferredStandard,
                 'Palette' => $preferredFormat,
             );

@@ -93,10 +94,15 @@ if ( ZM_V4L2 )
             }
             else
             {
-                $inputMonitor['Width'] = 352;
+                $inputMonitor['Width'] = 384;
                 $inputMonitor['Height'] = 288;
             }
-            $inputDesc = htmlspecialchars(serialize($inputMonitor));
+            if ( $preferredFormat == 'GREY' )
+            {
+                $inputMonitor['Colours'] = 1;
+                $inputMonitor['SignalCheckColour'] = '#000023';
+            }
+            $inputDesc = base64_encode(serialize($inputMonitor));
             $inputString = $deviceMatches[1].', chan '.$i.($input['free']?(" - ".$SLANG['Available']):(" (".$monitors[$input['id']]['Name'].")"));
             $inputs[] = $input;
             $cameras[$inputDesc] = $inputString;

@@ -120,7 +126,7 @@ function probeAxis( $ip )
             'Host' => $ip,
             'Port' => 80,
             'Path' => '/axis-cgi/mjpg/video.cgi?resolution=320x240',
-            'Palette' => 3,
+            'Colours' => 4,
             'Width' => 320,
             'Height' => 240,
         ),

@@ -154,7 +160,7 @@ function probePana( $ip )
             'Host' => $ip,
             'Port' => 80,
             'Path' => '/nphMotionJpeg?Resolution=320x240&Quality=Standard',
-            'Palette' => 3,
+            'Colours' => 4,
             'Width' => 320,
             'Height' => 240,
         ),

@@ -174,7 +180,7 @@ function probeActi( $ip )
             'Host' => 'Admin:123456@'.$ip,
             'Port' => 7070,
             'Path' => '',
-            'Palette' => 3,
+            'Colours' => 4,
             'Width' => 320,
             'Height' => 240,
         ),

@@ -209,7 +215,7 @@ function probeVivotek( $ip )
             'Host' => $ip,
             'Port' => 554,
             'Path' => '',
-            'Palette' => 3,
+            'Colours' => 4,
             'Width' => 352,
             'Height' => 240,
         ),