LocalCamera: Remove support for V4L1

Kernel support for Video4Linux 1 was removed in 2.6.38 (March 2011). It's time to drop support on our side.
Peter Keresztes Schmidt 2021-05-31 23:23:55 +02:00
parent ed7759897b
commit f2bee0704a
7 changed files with 4 additions and 803 deletions

View File

@@ -264,10 +264,6 @@ if(ZM_SYSTEMD OR (IS_DIRECTORY /usr/lib/systemd/system) OR (IS_DIRECTORY /lib/sy
endif()
# System checks
check_include_file("libv4l1-videodev.h" HAVE_LIBV4L1_VIDEODEV_H)
if(NOT HAVE_LIBV4L1_VIDEODEV_H)
check_include_file("linux/videodev.h" HAVE_LINUX_VIDEODEV_H)
endif()
check_include_file("linux/videodev2.h" HAVE_LINUX_VIDEODEV2_H) check_include_file("linux/videodev2.h" HAVE_LINUX_VIDEODEV2_H)
check_include_file("execinfo.h" HAVE_EXECINFO_H) check_include_file("execinfo.h" HAVE_EXECINFO_H)
if(HAVE_EXECINFO_H) if(HAVE_EXECINFO_H)
@@ -638,22 +634,15 @@ if((NOT HAVE_LIBJWT) AND (NOT HAVE_LIBOPENSSL))
message(FATAL_ERROR "Using the jwt-cpp backend requires OpenSSL as crypto backend.")
endif()
# Check for V4L header files and enable ZM_HAS_V4L, ZM_HAS_V4L1, ZM_HAS_V4L2 accordingly
# Check for V4L header files and enable ZM_HAS_V4L, ZM_HAS_V4L2 accordingly
# Setting to zeros first is required because ZM uses #define for these
set(ZM_HAS_V4L 0)
set(ZM_HAS_V4L1 0)
set(ZM_HAS_V4L2 0)
if(HAVE_LINUX_VIDEODEV_H OR HAVE_LIBV4L1_VIDEODEV_H)
set(ZM_HAS_V4L 1)
set(ZM_HAS_V4L1 1)
endif()
if(HAVE_LINUX_VIDEODEV2_H)
set(ZM_HAS_V4L 1)
set(ZM_HAS_V4L2 1)
endif()
if((NOT HAVE_LINUX_VIDEODEV_H)
AND (NOT HAVE_LIBV4L1_VIDEODEV_H)
AND (NOT HAVE_LINUX_VIDEODEV2_H))
if(NOT HAVE_LINUX_VIDEODEV2_H)
message(AUTHOR_WARNING "Video 4 Linux headers weren't found - Analog and USB camera support will not be available")
endif()
# Check for PCRE and enable ZM_PCRE accordingly
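For readability, here is the V4L detection block as it stands after this commit, pieced together from the new side of the hunk above (indentation is approximate):

    # Check for V4L header files and enable ZM_HAS_V4L, ZM_HAS_V4L2 accordingly
    # Setting to zeros first is required because ZM uses #define for these
    set(ZM_HAS_V4L 0)
    set(ZM_HAS_V4L2 0)
    if(HAVE_LINUX_VIDEODEV2_H)
      set(ZM_HAS_V4L 1)
      set(ZM_HAS_V4L2 1)
    endif()
    if(NOT HAVE_LINUX_VIDEODEV2_H)
      message(AUTHOR_WARNING "Video 4 Linux headers weren't found - Analog and USB camera support will not be available")
    endif()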

View File

@@ -175,76 +175,6 @@ static _AVPIXELFORMAT getFfPixFormatFromV4lPalette(int v4l_version, int palette)
} // end switch palette
} // end if v4l2
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
switch( palette ) {
case VIDEO_PALETTE_RGB32 :
if ( BigEndian )
pixFormat = AV_PIX_FMT_ARGB;
else
pixFormat = AV_PIX_FMT_BGRA;
break;
case VIDEO_PALETTE_RGB24 :
if ( BigEndian )
pixFormat = AV_PIX_FMT_RGB24;
else
pixFormat = AV_PIX_FMT_BGR24;
break;
case VIDEO_PALETTE_GREY :
pixFormat = AV_PIX_FMT_GRAY8;
break;
case VIDEO_PALETTE_RGB555 :
pixFormat = AV_PIX_FMT_RGB555;
break;
case VIDEO_PALETTE_RGB565 :
pixFormat = AV_PIX_FMT_RGB565;
break;
case VIDEO_PALETTE_YUYV :
case VIDEO_PALETTE_YUV422 :
pixFormat = AV_PIX_FMT_YUYV422;
break;
case VIDEO_PALETTE_YUV422P :
pixFormat = AV_PIX_FMT_YUV422P;
break;
case VIDEO_PALETTE_YUV420P :
pixFormat = AV_PIX_FMT_YUV420P;
break;
default :
{
Fatal("Can't find swscale format for palette %d", palette);
break;
#if 0
// These are all spare and may match some of the above
pixFormat = AV_PIX_FMT_YUVJ420P;
pixFormat = AV_PIX_FMT_YUVJ422P;
pixFormat = AV_PIX_FMT_YUVJ444P;
pixFormat = AV_PIX_FMT_UYVY422;
pixFormat = AV_PIX_FMT_UYYVYY411;
pixFormat = AV_PIX_FMT_BGR565;
pixFormat = AV_PIX_FMT_BGR555;
pixFormat = AV_PIX_FMT_BGR8;
pixFormat = AV_PIX_FMT_BGR4;
pixFormat = AV_PIX_FMT_BGR4_BYTE;
pixFormat = AV_PIX_FMT_RGB8;
pixFormat = AV_PIX_FMT_RGB4;
pixFormat = AV_PIX_FMT_RGB4_BYTE;
pixFormat = AV_PIX_FMT_NV12;
pixFormat = AV_PIX_FMT_NV21;
pixFormat = AV_PIX_FMT_RGB32_1;
pixFormat = AV_PIX_FMT_BGR32_1;
pixFormat = AV_PIX_FMT_GRAY16BE;
pixFormat = AV_PIX_FMT_GRAY16LE;
pixFormat = AV_PIX_FMT_YUV440P;
pixFormat = AV_PIX_FMT_YUVJ440P;
pixFormat = AV_PIX_FMT_YUVA420P;
//pixFormat = AV_PIX_FMT_VDPAU_H264;
//pixFormat = AV_PIX_FMT_VDPAU_MPEG1;
//pixFormat = AV_PIX_FMT_VDPAU_MPEG2;
#endif
}
} // end switch palette
} // end if v4l1
#endif // ZM_HAS_V4L1
return pixFormat;
} // end getFfPixFormatFromV4lPalette
#endif // HAVE_LIBSWSCALE
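Only the V4L2 branch of getFfPixFormatFromV4lPalette survives this commit, and it is not shown in the hunk. As a rough illustration of what that kind of mapping looks like, here is a hedged sketch using plain V4L2 and FFmpeg constants; it is not the ZoneMinder source, and the set of cases is an assumed subset:

    // Illustrative sketch only: map a few common V4L2 pixel formats to FFmpeg
    // pixel formats, mirroring the style of the surviving v4l_version == 2 branch.
    #include <cstdint>
    #include <linux/videodev2.h>   // V4L2_PIX_FMT_* constants
    extern "C" {
    #include <libavutil/pixfmt.h>  // AVPixelFormat, AV_PIX_FMT_*
    }

    static AVPixelFormat sketchV4l2ToFfPixFormat(uint32_t v4l2_palette) {
      switch (v4l2_palette) {
        case V4L2_PIX_FMT_GREY:   return AV_PIX_FMT_GRAY8;
        case V4L2_PIX_FMT_YUYV:   return AV_PIX_FMT_YUYV422;
        case V4L2_PIX_FMT_UYVY:   return AV_PIX_FMT_UYVY422;
        case V4L2_PIX_FMT_RGB24:  return AV_PIX_FMT_RGB24;
        case V4L2_PIX_FMT_BGR24:  return AV_PIX_FMT_BGR24;
        case V4L2_PIX_FMT_YUV420: return AV_PIX_FMT_YUV420P;
        default:                  return AV_PIX_FMT_NONE;  // unsupported palette
      }
    }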
@@ -296,9 +226,6 @@ int LocalCamera::v4l_version = 0;
#if ZM_HAS_V4L2
LocalCamera::V4L2Data LocalCamera::v4l2_data;
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
LocalCamera::V4L1Data LocalCamera::v4l1_data;
#endif // ZM_HAS_V4L1
#if HAVE_LIBSWSCALE
AVFrame **LocalCamera::capturePictures = nullptr;
@@ -537,118 +464,6 @@ LocalCamera::LocalCamera(
} // end if v4l2
#endif // ZM_HAS_V4L2
/* V4L1 format matching */
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
/* Try to find a match for the selected palette and target colourspace */
/* RGB32 palette and 32bit target colourspace */
if ( palette == VIDEO_PALETTE_RGB32 && colours == ZM_COLOUR_RGB32 ) {
conversion_type = 0;
if ( BigEndian ) {
subpixelorder = ZM_SUBPIX_ORDER_ARGB;
} else {
subpixelorder = ZM_SUBPIX_ORDER_BGRA;
}
/* RGB24 palette and 24bit target colourspace */
} else if ( palette == VIDEO_PALETTE_RGB24 && colours == ZM_COLOUR_RGB24 ) {
conversion_type = 0;
if ( BigEndian ) {
subpixelorder = ZM_SUBPIX_ORDER_RGB;
} else {
subpixelorder = ZM_SUBPIX_ORDER_BGR;
}
/* Grayscale palette and grayscale target colourspace */
} else if ( palette == VIDEO_PALETTE_GREY && colours == ZM_COLOUR_GRAY8 ) {
conversion_type = 0;
subpixelorder = ZM_SUBPIX_ORDER_NONE;
/* Unable to find a solution for the selected palette and target colourspace. Conversion required. Notify the user of performance penalty */
} else {
if ( capture )
Info("No direct match for the selected palette and target colorspace. Format conversion is required, performance penalty expected");
#if HAVE_LIBSWSCALE
/* Try using swscale for the conversion */
conversion_type = 1;
Debug(2,"Using swscale for image conversion");
if ( colours == ZM_COLOUR_RGB32 ) {
subpixelorder = ZM_SUBPIX_ORDER_RGBA;
imagePixFormat = AV_PIX_FMT_RGBA;
} else if ( colours == ZM_COLOUR_RGB24 ) {
subpixelorder = ZM_SUBPIX_ORDER_RGB;
imagePixFormat = AV_PIX_FMT_RGB24;
} else if ( colours == ZM_COLOUR_GRAY8 ) {
subpixelorder = ZM_SUBPIX_ORDER_NONE;
imagePixFormat = AV_PIX_FMT_GRAY8;
} else {
Panic("Unexpected colours: %u", colours);
}
if ( capture ) {
if ( !sws_isSupportedInput(capturePixFormat) ) {
Error("swscale does not support the used capture format");
conversion_type = 2; /* Try ZM format conversions */
}
if ( !sws_isSupportedOutput(imagePixFormat) ) {
Error("swscale does not support the target format");
conversion_type = 2; /* Try ZM format conversions */
}
}
/* Our YUYV->Grayscale conversion is a lot faster than swscale's */
if ( colours == ZM_COLOUR_GRAY8 && (palette == VIDEO_PALETTE_YUYV || palette == VIDEO_PALETTE_YUV422) ) {
conversion_type = 2;
}
#else
/* Don't have swscale, see what we can do */
conversion_type = 2;
#endif
if ( conversion_type == 2 ) {
Debug(2,"Using ZM for image conversion");
if ( palette == VIDEO_PALETTE_RGB32 && colours == ZM_COLOUR_GRAY8 ) {
if ( BigEndian ) {
conversion_fptr = &std_convert_argb_gray8;
subpixelorder = ZM_SUBPIX_ORDER_NONE;
} else {
conversion_fptr = &std_convert_bgra_gray8;
subpixelorder = ZM_SUBPIX_ORDER_NONE;
}
} else if ( (palette == VIDEO_PALETTE_YUYV || palette == VIDEO_PALETTE_YUV422) && colours == ZM_COLOUR_GRAY8 ) {
/* Fast YUYV->Grayscale conversion by extracting the Y channel */
if ( config.cpu_extensions && sse_version >= 35 ) {
conversion_fptr = &ssse3_convert_yuyv_gray8;
Debug(2,"Using SSSE3 YUYV->grayscale fast conversion");
} else {
conversion_fptr = &std_convert_yuyv_gray8;
Debug(2,"Using standard YUYV->grayscale fast conversion");
}
subpixelorder = ZM_SUBPIX_ORDER_NONE;
} else if ( (palette == VIDEO_PALETTE_YUYV || palette == VIDEO_PALETTE_YUV422) && colours == ZM_COLOUR_RGB24 ) {
conversion_fptr = &zm_convert_yuyv_rgb;
subpixelorder = ZM_SUBPIX_ORDER_RGB;
} else if ( (palette == VIDEO_PALETTE_YUYV || palette == VIDEO_PALETTE_YUV422) && colours == ZM_COLOUR_RGB32 ) {
conversion_fptr = &zm_convert_yuyv_rgba;
subpixelorder = ZM_SUBPIX_ORDER_RGBA;
} else if ( palette == VIDEO_PALETTE_RGB555 && colours == ZM_COLOUR_RGB24 ) {
conversion_fptr = &zm_convert_rgb555_rgb;
subpixelorder = ZM_SUBPIX_ORDER_RGB;
} else if ( palette == VIDEO_PALETTE_RGB555 && colours == ZM_COLOUR_RGB32 ) {
conversion_fptr = &zm_convert_rgb555_rgba;
subpixelorder = ZM_SUBPIX_ORDER_RGBA;
} else if ( palette == VIDEO_PALETTE_RGB565 && colours == ZM_COLOUR_RGB24 ) {
conversion_fptr = &zm_convert_rgb565_rgb;
subpixelorder = ZM_SUBPIX_ORDER_RGB;
} else if ( palette == VIDEO_PALETTE_RGB565 && colours == ZM_COLOUR_RGB32 ) {
conversion_fptr = &zm_convert_rgb565_rgba;
subpixelorder = ZM_SUBPIX_ORDER_RGBA;
} else {
Fatal("Unable to find a suitable format conversion for the selected palette and target colorspace.");
}
} // end if conversion_type == 2
}
}
#endif // ZM_HAS_V4L1
last_camera = this;
Debug(3, "Selected subpixelorder: %u", subpixelorder);
@@ -975,169 +790,6 @@ void LocalCamera::Initialise() {
Colour(colour);
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
Debug(3, "Configuring picture attributes");
struct video_picture vid_pic;
memset(&vid_pic, 0, sizeof(vid_pic));
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0 )
Fatal("Failed to get picture attributes: %s", strerror(errno));
Debug(4,
"Old Palette:%d, depth:%d, brightness:%d, hue:%d, colour:%d, contrast:%d",
vid_pic.palette,
vid_pic.depth,
vid_pic.brightness,
vid_pic.hue,
vid_pic.colour,
vid_pic.contrast
);
switch (vid_pic.palette = palette) {
case VIDEO_PALETTE_RGB32 :
vid_pic.depth = 32;
break;
case VIDEO_PALETTE_RGB24 :
vid_pic.depth = 24;
break;
case VIDEO_PALETTE_GREY :
vid_pic.depth = 8;
break;
case VIDEO_PALETTE_RGB565 :
case VIDEO_PALETTE_YUYV :
case VIDEO_PALETTE_YUV422 :
case VIDEO_PALETTE_YUV420P :
case VIDEO_PALETTE_YUV422P :
default:
vid_pic.depth = 16;
break;
}
if ( brightness >= 0 ) vid_pic.brightness = brightness;
if ( hue >= 0 ) vid_pic.hue = hue;
if ( colour >= 0 ) vid_pic.colour = colour;
if ( contrast >= 0 ) vid_pic.contrast = contrast;
if ( ioctl(vid_fd, VIDIOCSPICT, &vid_pic) < 0 ) {
Error("Failed to set picture attributes: %s", strerror(errno));
if ( config.strict_video_config )
exit(-1);
}
Debug(3, "Configuring window attributes");
struct video_window vid_win;
memset(&vid_win, 0, sizeof(vid_win));
if ( ioctl(vid_fd, VIDIOCGWIN, &vid_win) < 0 ) {
Fatal("Failed to get window attributes: %s", strerror(errno));
}
Debug(4, "Old X:%d Y:%d W:%d H:%d",
vid_win.x, vid_win.y, vid_win.width, vid_win.height);
vid_win.x = 0;
vid_win.y = 0;
vid_win.width = width;
vid_win.height = height;
vid_win.flags &= ~VIDEO_WINDOW_INTERLACE;
if ( ioctl(vid_fd, VIDIOCSWIN, &vid_win) < 0 ) {
Error("Failed to set window attributes: %s", strerror(errno));
if ( config.strict_video_config )
exit(-1);
}
Info("vid_win.width = %08x, vid_win.height = %08x, vid_win.flags = %08x",
vid_win.width, vid_win.height, vid_win.flags);
Debug(3, "Setting up request buffers");
if ( ioctl(vid_fd, VIDIOCGMBUF, &v4l1_data.frames) < 0 )
Fatal("Failed to setup memory: %s", strerror(errno));
if ( channel_count > 1 && !v4l_multi_buffer )
v4l1_data.frames.frames = 1;
v4l1_data.buffers = new video_mmap[v4l1_data.frames.frames];
Debug(4, "vmb.frames = %d, vmb.size = %d",
v4l1_data.frames.frames, v4l1_data.frames.size);
Debug(3, "Setting up %d frame buffers", v4l1_data.frames.frames);
v4l1_data.bufptr = (unsigned char *)mmap(0, v4l1_data.frames.size, PROT_READ|PROT_WRITE, MAP_SHARED, vid_fd, 0);
if ( v4l1_data.bufptr == MAP_FAILED )
Fatal("Could not mmap video: %s", strerror(errno));
#if HAVE_LIBSWSCALE
capturePictures = new AVFrame *[v4l1_data.frames.frames];
for ( int i = 0; i < v4l1_data.frames.frames; i++ ) {
v4l1_data.buffers[i].frame = i;
v4l1_data.buffers[i].width = width;
v4l1_data.buffers[i].height = height;
v4l1_data.buffers[i].format = palette;
#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101)
capturePictures[i] = av_frame_alloc();
#else
capturePictures[i] = avcodec_alloc_frame();
#endif
if ( !capturePictures[i] )
Fatal("Could not allocate picture");
#if LIBAVUTIL_VERSION_CHECK(54, 6, 0, 6, 0)
av_image_fill_arrays(
capturePictures[i]->data,
capturePictures[i]->linesize,
(unsigned char *)v4l1_data.bufptr+v4l1_data.frames.offsets[i],
capturePixFormat, width, height, 1);
#else
avpicture_fill(
(AVPicture *)capturePictures[i],
(unsigned char *)v4l1_data.bufptr+v4l1_data.frames.offsets[i],
capturePixFormat, width, height );
#endif
}
#endif // HAVE_LIBSWSCALE
Debug(3, "Configuring video source");
struct video_channel vid_src;
memset(&vid_src, 0, sizeof(vid_src));
vid_src.channel = channel;
if ( ioctl(vid_fd, VIDIOCGCHAN, &vid_src) < 0 )
Fatal("Failed to get camera source: %s", strerror(errno));
Debug(4, "Old C:%d, F:%d, Fl:%x, T:%d",
vid_src.channel, vid_src.norm, vid_src.flags, vid_src.type);
vid_src.norm = standard;
vid_src.flags = 0;
vid_src.type = VIDEO_TYPE_CAMERA;
if ( ioctl(vid_fd, VIDIOCSCHAN, &vid_src) < 0 ) {
Error("Failed to set camera source %d: %s", channel, strerror(errno));
if ( config.strict_video_config )
exit(-1);
}
if ( ioctl(vid_fd, VIDIOCGWIN, &vid_win) < 0 )
Fatal("Failed to get window data: %s", strerror(errno));
Info("vid_win.width = %08x, vid_win.height = %08x, vid_win.flags = %08x",
vid_win.width, vid_win.height, vid_win.flags);
Debug(4, "New X:%d Y:%d W:%d H:%d",
vid_win.x, vid_win.y, vid_win.width, vid_win.height);
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0 )
Fatal("Failed to get window data: %s", strerror(errno));
Debug(4,
"New Palette:%d, depth:%d, brightness:%d, hue:%d, colour:%d, contrast:%d",
vid_pic.palette,
vid_pic.depth,
vid_pic.brightness,
vid_pic.hue,
vid_pic.colour,
vid_pic.contrast
);
} // end if v4l
#endif // ZM_HAS_V4L1
} // end LocalCamera::Initialize
void LocalCamera::Terminate() {
@@ -1166,27 +818,6 @@ void LocalCamera::Terminate() {
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
#if HAVE_LIBSWSCALE
for ( int i=0; i < v4l1_data.frames.frames; i++ ) {
/* Free capture pictures */
#if LIBAVCODEC_VERSION_CHECK(55, 28, 1, 45, 101)
av_frame_free(&capturePictures[i]);
#else
av_freep(&capturePictures[i]);
#endif
}
#endif
Debug(3, "Unmapping video buffers");
if ( munmap((char*)v4l1_data.bufptr, v4l1_data.frames.size) < 0 )
Error("Failed to munmap buffers: %s", strerror(errno));
delete[] v4l1_data.buffers;
} // end if using v4l1
#endif // ZM_HAS_V4L1
close(vid_fd);
primed = false;
} // end LocalCamera::Terminate
@@ -1592,199 +1223,6 @@ bool LocalCamera::GetCurrentSettings(
*(output_ptr-1) = '\n';
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( version == 1 ) {
struct video_capability vid_cap;
memset(&vid_cap, 0, sizeof(video_capability));
if ( ioctl(vid_fd, VIDIOCGCAP, &vid_cap) < 0 ) {
Error("Failed to get video capabilities: %s", strerror(errno));
if ( verbose )
output_ptr += sprintf(output_ptr,
"Error, failed to get video capabilities %s: %s\n",
queryDevice, strerror(errno));
else
output_ptr += sprintf(output_ptr, "error%d\n", errno);
return false;
}
if ( verbose ) {
output_ptr += sprintf( output_ptr, "Video Capabilities\n"
" Name: %s\n"
" Type: %d\n%s%s%s%s%s%s%s%s%s%s%s%s%s%s"
" Video Channels: %d\n"
" Audio Channels: %d\n"
" Maximum Width: %d\n"
" Maximum Height: %d\n"
" Minimum Width: %d\n"
" Minimum Height: %d\n",
vid_cap.name,
vid_cap.type,
(vid_cap.type&VID_TYPE_CAPTURE)?" Can capture\n":"",
(vid_cap.type&VID_TYPE_TUNER)?" Can tune\n":"",
(vid_cap.type&VID_TYPE_TELETEXT)?" Does teletext\n":"",
(vid_cap.type&VID_TYPE_OVERLAY)?" Overlay onto frame buffer\n":"",
(vid_cap.type&VID_TYPE_CHROMAKEY)?" Overlay by chromakey\n":"",
(vid_cap.type&VID_TYPE_CLIPPING)?" Can clip\n":"",
(vid_cap.type&VID_TYPE_FRAMERAM)?" Uses the frame buffer memory\n":"",
(vid_cap.type&VID_TYPE_SCALES)?" Scalable\n":"",
(vid_cap.type&VID_TYPE_MONOCHROME)?" Monochrome only\n":"",
(vid_cap.type&VID_TYPE_SUBCAPTURE)?" Can capture subareas of the image\n":"",
(vid_cap.type&VID_TYPE_MPEG_DECODER)?" Can decode MPEG streams\n":"",
(vid_cap.type&VID_TYPE_MPEG_ENCODER)?" Can encode MPEG streams\n":"",
(vid_cap.type&VID_TYPE_MJPEG_DECODER)?" Can decode MJPEG streams\n":"",
(vid_cap.type&VID_TYPE_MJPEG_ENCODER)?" Can encode MJPEG streams\n":"",
vid_cap.channels,
vid_cap.audios,
vid_cap.maxwidth,
vid_cap.maxheight,
vid_cap.minwidth,
vid_cap.minheight );
} else {
output_ptr += sprintf(output_ptr, "N:%s|T:%d|nC:%d|nA:%d|mxW:%d|mxH:%d|mnW:%d|mnH:%d|"
, vid_cap.name
, vid_cap.type
, vid_cap.channels
, vid_cap.audios
, vid_cap.maxwidth
, vid_cap.maxheight
, vid_cap.minwidth
, vid_cap.minheight);
}
struct video_window vid_win;
memset(&vid_win, 0, sizeof(video_window));
if ( ioctl(vid_fd, VIDIOCGWIN, &vid_win) < 0 ) {
Error("Failed to get window attributes: %s", strerror(errno));
if ( verbose )
output_ptr += sprintf(output_ptr, "Error, failed to get window attributes: %s\n", strerror(errno));
else
output_ptr += sprintf(output_ptr, "error%d\n", errno);
return false;
}
if ( verbose ) {
output_ptr += sprintf(output_ptr,
"Window Attributes\n"
" X Offset: %d\n"
" Y Offset: %d\n"
" Width: %d\n"
" Height: %d\n"
, vid_win.x
, vid_win.y
, vid_win.width
, vid_win.height );
} else {
output_ptr += sprintf(output_ptr, "X:%d|Y:%d|W:%d|H:%d|",
vid_win.height, vid_win.x, vid_win.y, vid_win.width);
}
struct video_picture vid_pic;
memset(&vid_pic, 0, sizeof(video_picture));
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0 ) {
Error("Failed to get picture attributes: %s", strerror(errno));
if ( verbose )
output_ptr += sprintf(output_ptr, "Error, failed to get picture attributes: %s\n", strerror(errno));
else
output_ptr += sprintf(output_ptr, "error%d\n", errno);
return false;
}
if ( verbose ) {
output_ptr += sprintf(output_ptr,
"Picture Attributes\n"
" Palette: %d - %s\n"
" Colour Depth: %d\n"
" Brightness: %d\n"
" Hue: %d\n"
" Colour :%d\n"
" Contrast: %d\n"
" Whiteness: %d\n"
, vid_pic.palette,
vid_pic.palette==VIDEO_PALETTE_GREY?"Linear greyscale":(
vid_pic.palette==VIDEO_PALETTE_HI240?"High 240 cube (BT848)":(
vid_pic.palette==VIDEO_PALETTE_RGB565?"565 16 bit RGB":(
vid_pic.palette==VIDEO_PALETTE_RGB24?"24bit RGB":(
vid_pic.palette==VIDEO_PALETTE_RGB32?"32bit RGB":(
vid_pic.palette==VIDEO_PALETTE_RGB555?"555 15bit RGB":(
vid_pic.palette==VIDEO_PALETTE_YUV422?"YUV422 capture":(
vid_pic.palette==VIDEO_PALETTE_YUYV?"YUYV":(
vid_pic.palette==VIDEO_PALETTE_UYVY?"UVYV":(
vid_pic.palette==VIDEO_PALETTE_YUV420?"YUV420":(
vid_pic.palette==VIDEO_PALETTE_YUV411?"YUV411 capture":(
vid_pic.palette==VIDEO_PALETTE_RAW?"RAW capture (BT848)":(
vid_pic.palette==VIDEO_PALETTE_YUYV?"YUYV":(
vid_pic.palette==VIDEO_PALETTE_YUV422?"YUV422":(
vid_pic.palette==VIDEO_PALETTE_YUV422P?"YUV 4:2:2 Planar":(
vid_pic.palette==VIDEO_PALETTE_YUV411P?"YUV 4:1:1 Planar":(
vid_pic.palette==VIDEO_PALETTE_YUV420P?"YUV 4:2:0 Planar":(
vid_pic.palette==VIDEO_PALETTE_YUV410P?"YUV 4:1:0 Planar":"Unknown"
))))))))))))))))),
vid_pic.depth,
vid_pic.brightness,
vid_pic.hue,
vid_pic.colour,
vid_pic.contrast,
vid_pic.whiteness
);
} else {
output_ptr += sprintf(output_ptr, "P:%d|D:%d|B:%d|h:%d|Cl:%d|Cn:%d|w:%d|",
vid_pic.palette,
vid_pic.depth,
vid_pic.brightness,
vid_pic.hue,
vid_pic.colour,
vid_pic.contrast,
vid_pic.whiteness
);
}
for ( int chan = 0; chan < vid_cap.channels; chan++ ) {
struct video_channel vid_src;
memset(&vid_src, 0, sizeof(video_channel));
vid_src.channel = chan;
if ( ioctl(vid_fd, VIDIOCGCHAN, &vid_src) < 0 ) {
Error("Failed to get channel %d attributes: %s", chan, strerror(errno));
if ( verbose )
output_ptr += sprintf(output_ptr, "Error, failed to get channel %d attributes: %s\n", chan, strerror(errno));
else
output_ptr += sprintf(output_ptr, "error%d\n", errno);
return false;
}
if ( verbose ) {
output_ptr += sprintf(output_ptr,
"Channel %d Attributes\n"
" Name: %s\n"
" Channel: %d\n"
" Flags: %d\n%s%s"
" Type: %d - %s\n"
" Format: %d - %s\n"
, chan
, vid_src.name
, vid_src.channel
, vid_src.flags
, (vid_src.flags&VIDEO_VC_TUNER)?" Channel has a tuner\n":""
, (vid_src.flags&VIDEO_VC_AUDIO)?" Channel has audio\n":""
, vid_src.type,
vid_src.type==VIDEO_TYPE_TV?"TV":(
vid_src.type==VIDEO_TYPE_CAMERA?"Camera":"Unknown"
)
, vid_src.norm,
vid_src.norm==VIDEO_MODE_PAL?"PAL":(
vid_src.norm==VIDEO_MODE_NTSC?"NTSC":(
vid_src.norm==VIDEO_MODE_SECAM?"SECAM":(
vid_src.norm==VIDEO_MODE_AUTO?"AUTO":"Unknown"
))));
} else {
output_ptr += sprintf(output_ptr, "n%d:%s|C%d:%d|Fl%d:%x|T%d:%d|F%d:%d%s|"
, chan, vid_src.name
, chan, vid_src.channel
, chan, vid_src.flags
, chan, vid_src.type
, chan, vid_src.norm, chan==(vid_cap.channels-1)?"":","
);
}
}
if ( !verbose )
*output_ptr = '\n';
}
#endif // ZM_HAS_V4L1
close(vid_fd);
if ( device )
break;
@@ -1824,25 +1262,6 @@ int LocalCamera::Brightness(int p_brightness) {
return vid_control.value;
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
struct video_picture vid_pic;
memset(&vid_pic, 0, sizeof(video_picture));
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0 ) {
Error("Failed to get picture attributes: %s", strerror(errno));
return -1;
}
if ( p_brightness >= 0 ) {
vid_pic.brightness = p_brightness;
if ( ioctl(vid_fd, VIDIOCSPICT, &vid_pic) < 0 ) {
Error("Failed to set picture attributes: %s", strerror(errno));
return -1;
}
}
return vid_pic.brightness;
}
#endif // ZM_HAS_V4L1
return -1;
}
@@ -1874,25 +1293,6 @@ int LocalCamera::Hue(int p_hue) {
return vid_control.value;
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
struct video_picture vid_pic;
memset(&vid_pic, 0, sizeof(video_picture));
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0 ) {
Error("Failed to get picture attributes: %s", strerror(errno));
return -1;
}
if ( p_hue >= 0 ) {
vid_pic.hue = p_hue;
if ( ioctl(vid_fd, VIDIOCSPICT, &vid_pic) < 0 ) {
Error("Failed to set picture attributes: %s", strerror(errno));
return -1;
}
}
return vid_pic.hue;
}
#endif // ZM_HAS_V4L1
return -1;
}
@@ -1925,25 +1325,6 @@ int LocalCamera::Colour( int p_colour ) {
return vid_control.value;
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
struct video_picture vid_pic;
memset(&vid_pic, 0, sizeof(video_picture));
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0) {
Error("Failed to get picture attributes: %s", strerror(errno));
return -1;
}
if ( p_colour >= 0 ) {
vid_pic.colour = p_colour;
if ( ioctl(vid_fd, VIDIOCSPICT, &vid_pic) < 0 ) {
Error("Failed to set picture attributes: %s", strerror(errno));
return -1;
}
}
return vid_pic.colour;
}
#endif // ZM_HAS_V4L1
return -1;
}
@@ -1976,25 +1357,6 @@ int LocalCamera::Contrast( int p_contrast ) {
return vid_control.value;
}
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
struct video_picture vid_pic;
memset(&vid_pic, 0, sizeof(video_picture));
if ( ioctl(vid_fd, VIDIOCGPICT, &vid_pic) < 0 ) {
Error("Failed to get picture attributes: %s", strerror(errno));
return -1;
}
if ( p_contrast >= 0 ) {
vid_pic.contrast = p_contrast;
if ( ioctl(vid_fd, VIDIOCSPICT, &vid_pic) < 0 ) {
Error("Failed to set picture attributes: %s", strerror(errno));
return -1;
}
}
return vid_pic.contrast;
}
#endif // ZM_HAS_V4L1
return -1;
}
@@ -2036,17 +1398,6 @@ int LocalCamera::PrimeCapture() {
}
} // end if v4l_version == 2
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
for ( int frame = 0; frame < v4l1_data.frames.frames; frame++ ) {
Debug(3, "Queueing frame %d", frame);
if ( ioctl(vid_fd, VIDIOCMCAPTURE, &v4l1_data.buffers[frame]) < 0 ) {
Error("Capture failure for frame %d: %s", frame, strerror(errno));
return -1;
}
}
}
#endif // ZM_HAS_V4L1
return 1;
} // end LocalCamera::PrimeCapture
@@ -2119,36 +1470,7 @@ int LocalCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
v4l2_data.fmt.fmt.pix.width, v4l2_data.fmt.fmt.pix.height, width, height);
}
} // end if v4l2
#if ZM_HAS_V4L1
else
#endif // ZM_HAS_V4L1
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
Debug(3, "Capturing %d frames", captures_per_frame);
while ( captures_per_frame ) {
Debug(3, "Syncing frame %d", v4l1_data.active_frame);
if ( ioctl(vid_fd, VIDIOCSYNC, &v4l1_data.active_frame) < 0 ) {
Error("Sync failure for frame %d buffer %d: %s",
v4l1_data.active_frame, captures_per_frame, strerror(errno) );
return -1;
}
captures_per_frame--;
if ( captures_per_frame ) {
Debug(3, "Capturing frame %d", v4l1_data.active_frame);
if ( ioctl(vid_fd, VIDIOCMCAPTURE, &v4l1_data.buffers[v4l1_data.active_frame]) < 0 ) {
Error("Capture failure for buffer %d (%d): %s",
v4l1_data.active_frame, captures_per_frame, strerror(errno));
return -1;
}
}
}
capture_frame = v4l1_data.active_frame;
Debug(3, "Captured %d for channel %d", capture_frame, channel);
buffer = v4l1_data.bufptr+v4l1_data.frames.offsets[capture_frame];
}
#endif // ZM_HAS_V4L1
#if ZM_HAS_V4L2
if ( v4l_version == 2 ) {
if ( channel_count > 1 ) {
@@ -2174,40 +1496,7 @@ int LocalCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
Error("Unable to requeue buffer due to not v4l2_data");
}
}
#if ZM_HAS_V4L1
else
#endif // ZM_HAS_V4L1
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
if ( v4l_version == 1 ) {
if ( channel_count > 1 ) {
Debug(3, "Switching video source");
int next_channel = (channel_index+1)%channel_count;
struct video_channel vid_src;
memset(&vid_src, 0, sizeof(vid_src));
vid_src.channel = channel;
if ( ioctl(vid_fd, VIDIOCGCHAN, &vid_src) < 0 ) {
Error("Failed to get camera source %d: %s", channel, strerror(errno));
return -1;
}
vid_src.channel = channels[next_channel];
vid_src.norm = standards[next_channel];
vid_src.flags = 0;
vid_src.type = VIDEO_TYPE_CAMERA;
if ( ioctl(vid_fd, VIDIOCSCHAN, &vid_src) < 0 ) {
Error("Failed to set camera source %d: %s", channel, strerror(errno));
return -1;
}
}
Debug(3, "Requeueing frame %d", v4l1_data.active_frame);
if ( ioctl(vid_fd, VIDIOCMCAPTURE, &v4l1_data.buffers[v4l1_data.active_frame]) < 0 ) {
Error("Capture failure for frame %d: %s", v4l1_data.active_frame, strerror(errno));
return -1;
}
v4l1_data.active_frame = (v4l1_data.active_frame+1)%v4l1_data.frames.frames;
}
#endif // ZM_HAS_V4L1
} /* prime capture */
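The capture loop removed here drives V4L1 with the VIDIOCMCAPTURE and VIDIOCSYNC ioctls; the remaining v4l_version == 2 path relies on the V4L2 streaming I/O model instead. A minimal sketch of that pattern, using only the public V4L2 API rather than ZoneMinder's own helpers (function name and error handling are illustrative):

    // Dequeue a filled capture buffer, note its index, and hand it back to the
    // driver. VIDIOC_DQBUF takes the place of VIDIOCSYNC, and VIDIOC_QBUF takes
    // the place of re-issuing VIDIOCMCAPTURE for the next frame.
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    static int dequeueAndRequeueFrame(int vid_fd) {
      v4l2_buffer vid_buf = {};
      vid_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      vid_buf.memory = V4L2_MEMORY_MMAP;

      // Wait for the driver to hand back a filled buffer.
      if (ioctl(vid_fd, VIDIOC_DQBUF, &vid_buf) < 0)
        return -1;

      int index = vid_buf.index;  // the mmap'ed buffer that now holds the frame

      // Give the buffer back to the driver so it can be filled again.
      if (ioctl(vid_fd, VIDIOC_QBUF, &vid_buf) < 0)
        return -1;

      return index;
    }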

View File

@@ -24,21 +24,10 @@
#if ZM_HAS_V4L
#ifdef HAVE_LINUX_VIDEODEV_H
#include <linux/videodev.h>
#endif // HAVE_LINUX_VIDEODEV_H
#ifdef HAVE_LIBV4L1_VIDEODEV_H
#include <libv4l1-videodev.h>
#endif // HAVE_LIB4VL1_VIDEODEV_H
#ifdef HAVE_LINUX_VIDEODEV2_H
#include <linux/videodev2.h>
#endif // HAVE_LINUX_VIDEODEV2_H
// Required on systems with v4l1 but without v4l2 headers
#ifndef VIDEO_MAX_FRAME
#define VIDEO_MAX_FRAME 32
#endif
//
// Class representing 'local' cameras, i.e. those which are
// directly connect to the host machine and which are accessed
@@ -62,15 +51,6 @@ protected:
};
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
struct V4L1Data {
int active_frame;
video_mbuf frames;
video_mmap *buffers;
unsigned char *bufptr;
};
#endif // ZM_HAS_V4L1
protected:
std::string device;
int channel;
@@ -98,9 +78,6 @@ protected:
#if ZM_HAS_V4L2
static V4L2Data v4l2_data;
#endif // ZM_HAS_V4L2
#if ZM_HAS_V4L1
static V4L1Data v4l1_data;
#endif // ZM_HAS_V4L1
#if HAVE_LIBSWSCALE
static AVFrame **capturePictures;

View File

@@ -265,11 +265,7 @@ int main(int argc, char *argv[]) {
char *auth = nullptr;
std::string jwt_token_str = "";
#if ZM_HAS_V4L
#if ZM_HAS_V4L2
int v4lVersion = 2;
#elif ZM_HAS_V4L1
int v4lVersion = 1;
#endif // ZM_HAS_V4L2/1
#endif // ZM_HAS_V4L
while (1) {
int option_index = 0;
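In zmu, v4lVersion previously defaulted to 1 when only V4L1 headers were available; with that branch gone the preprocessor selection collapses, and the resulting block (from the new side of the hunk above) is simply:

    #if ZM_HAS_V4L
      int v4lVersion = 2;
    #endif // ZM_HAS_V4L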

View File

@@ -70,10 +70,9 @@ foreach ( $configvals as $key => $value ) {
define('ZMU_PATH', ZM_PATH_BIN.'/zmu'); // Local path to the ZoneMinder Utility
//
// If setup supports Video 4 Linux v2 and/or v1
// If setup supports Video 4 Linux v2
//
define('ZM_HAS_V4L2', '@ZM_HAS_V4L2@'); // V4L2 support enabled
define('ZM_HAS_V4L1', '@ZM_HAS_V4L1@'); // V4L1 support enabled
define('ZM_HAS_V4L', '@ZM_HAS_V4L@'); // V4L support enabled
//

View File

@@ -132,13 +132,10 @@ if ( !ZM_HAS_V4L )
$localMethods = array(
'v4l2' => 'Video For Linux version 2',
'v4l1' => 'Video For Linux version 1',
);
if ( !ZM_HAS_V4L2 )
unset($localMethods['v4l2']);
if ( !ZM_HAS_V4L1 )
unset($localMethods['v4l1']);
$remoteProtocols = array(
'http' => 'HTTP',
@@ -170,36 +167,6 @@ if ( !ZM_PCRE )
// Currently unsupported
unset($httpMethods['jpegTags']);
if ( ZM_HAS_V4L1 ) {
$v4l1DeviceFormats = array(
0 => 'PAL',
1 => 'NTSC',
2 => 'SECAM',
3 => 'AUTO',
4 => 'FMT4',
5 => 'FMT5',
6 => 'FMT6',
7 => 'FMT7'
);
$v4l1MaxChannels = 15;
$v4l1DeviceChannels = array();
for ( $i = 0; $i <= $v4l1MaxChannels; $i++ )
$v4l1DeviceChannels[$i] = $i;
$v4l1LocalPalettes = array(
1 => translate('Grey'),
5 => 'BGR32',
4 => 'BGR24',
8 => '*YUYV',
3 => '*RGB565',
6 => '*RGB555',
7 => '*YUV422',
13 => '*YUV422P',
15 => '*YUV420P',
);
}
if ( ZM_HAS_V4L2 ) {
$v4l2DeviceFormats = array(
0x000000ff => 'PAL',
@@ -685,22 +652,7 @@ switch ( $name ) {
<td><?php echo htmlSelect('newMonitor[Method]', $localMethods, $monitor->Method(), array('onchange'=>'submitTab', 'data-tab-name'=>$tab) ); ?></td>
</tr>
<?php
if ( ZM_HAS_V4L1 && $monitor->Method() == 'v4l1' ) {
if ( ZM_HAS_V4L2 && $monitor->Method() == 'v4l2' ) {
?>
<tr>
<td class="text-right pr-3"><?php echo translate('DeviceChannel') ?></td>
<td><?php echo htmlSelect('newMonitor[Channel]', $v4l1DeviceChannels, $monitor->Channel()); ?></td>
</tr>
<tr>
<td class="text-right pr-3"><?php echo translate('DeviceFormat') ?></td>
<td><?php echo htmlSelect('newMonitor[Format]', $v4l1DeviceFormats, $monitor->Format()); ?></td>
</tr>
<tr>
<td class="text-right pr-3"><?php echo translate('CapturePalette') ?></td>
<td><?php echo htmlSelect('newMonitor[Palette]', $v4l1LocalPalettes, $monitor->Palette()); ?></td>
</tr>
<?php
} else {
?>
<tr>
<td class="text-right pr-3"><?php echo translate('DeviceChannel') ?></td>

View File

@@ -68,7 +68,6 @@
/* Few ZM options that are needed by the source code */
#cmakedefine ZM_MEM_MAPPED 1
#cmakedefine ZM_HAS_V4L 1
#cmakedefine ZM_HAS_V4L1 1
#cmakedefine ZM_HAS_V4L2 1
/* Its safe to assume that signal return type is void. This is a fix for zm_signal.h */