diff --git a/meson.build b/meson.build
index 068069e64..50b0f84fa 100644
--- a/meson.build
+++ b/meson.build
@@ -63,8 +63,7 @@ xml2_dep = dependency ('libxml-2.0', required: true)
libz_dep = dependency ('zlib', required: true)
usb_dep = dependency ('libusb-1.0', required: get_option ('usb'))
gmodule_dep = dependency ('gmodule-2.0', required: true)
-v4l2_dep = dependency('libv4l2', required: get_option('v4l2'))
-gudev_dep = dependency('gudev-1.0', required: get_option('v4l2'))
+gudev_dep = dependency ('gudev-1.0', required: get_option('v4l2'))
aravis_public_dependencies = [glib_dep, gobject_dep, gio_dep]
aravis_dependencies = [aravis_public_dependencies, xml2_dep, libz_dep, gmodule_dep]
@@ -77,8 +76,11 @@ if usb_dep.found()
aravis_dependencies += usb_dep
endif
-if v4l2_dep.found()
- aravis_dependencies += [v4l2_dep, gudev_dep]
+if gudev_dep.found()
+ aravis_dependencies += gudev_dep
+ v4l2_enabled = true
+else
+ v4l2_enabled = false
endif
if host_machine.system()=='windows'
diff --git a/src/arv-test.cfg b/src/arv-test.cfg
index c43030d1c..77e7f4f94 100644
--- a/src/arv-test.cfg
+++ b/src/arv-test.cfg
@@ -100,6 +100,15 @@ Schema=Invalid
SensorSize=2048;2048
SoftwareTriggerSupport=true
+[uvcvideo:Integrated_Webcam_HD: Integrate]
+
+SensorSize=640;480
+SoftwareTriggerSupport=false
+ChunksSupport=false
+UseSystemTimestamp=true
+FrameRateA=16.0
+FrameRateB=16.0
+
[Basler:acA1300-30gc]
ChunkList=Timestamp Framecounter
diff --git a/src/arv-v4l2.xml b/src/arv-v4l2.xml
index 482a4a1b9..ea0687c6a 100644
--- a/src/arv-v4l2.xml
+++ b/src/arv-v4l2.xml
@@ -20,6 +20,7 @@
DeviceControl
ImageFormatControl
+ AnalogControl
AcquisitionControl
TransportLayerControl
@@ -77,8 +78,8 @@
- SensorHeight
SensorWidth
+ SensorHeight
OffsetX
OffsetY
Width
@@ -88,74 +89,23 @@
PixelFormat
-
- Full height of image sensor.
- SensorHeightRegister
-
-
-
- 0x118
- 4
- RO
- Device
- Unsigned
- BigEndian
-
-
-
- Full height of image sensor.
- SensorWidthRegister
-
-
-
- 0x11c
- 4
- RO
- Device
- Unsigned
- BigEndian
-
-
X offset of image, in pixels.
- OffsetXRegister
- 0
- SensorWidth
- 1
+ 0
+ RO
-
- 0x130
- 4
- RW
- Device
- Unsigned
- BigEndian
-
-
Y offset of image, in pixels.
- OffsetYRegister
- 0
- SensorHeight
- 1
+ 0
+ RO
-
- 0x134
- 4
- RW
- Device
- Unsigned
- BigEndian
-
-
Width of image, in pixels.
WidthRegister
- 1
- SensorWidth
- 1
+ WidthRegister
+ WidthRegister
@@ -164,15 +114,14 @@
RW
Device
Unsigned
- BigEndian
+ LittleEndian
Height of image, in pixels.
HeightRegister
- 1
- SensorHeight
- 1
+ HeightRegister
+ HeightRegister
@@ -181,42 +130,17 @@
RW
Device
Unsigned
- BigEndian
+ LittleEndian
-
- Pixel format
-
- 17301515
-
-
- 17301514
-
-
- 17301512
-
-
- 17301513
-
-
- 17301505
-
-
- 35127316
-
-
- 17825799
-
- PixelFormatRegister
-
-
0x128
4
RW
Device
Unsigned
- BigEndian
+ LittleEndian
+ HexNumber
@@ -225,7 +149,9 @@
AcquisitionMode
AcquisitionStart
AcquisitionStop
+ AcquisitionFrameRate
ExposureTimeAbs
+ ExposureAuto
@@ -246,7 +172,7 @@
WO
Device
Unsigned
- BigEndian
+ LittleEndian
@@ -254,49 +180,39 @@
1
-
- 2
-
-
- 3
-
- AcquisitionModeRegister
+ 1
-
- 0x12c
- 4
- RW
- Device
- Unsigned
- BigEndian
-
-
- AcquisitionFrameRateConverter
+ AcquisitionFrameRateRegister
+ AcquisitionFrameRateMinRegister
+ AcquisitionFrameRateMaxRegister
+ Hz
-
- Frame rate, in frames per second.
- (1000000 / FROM)
- (1000000 / TO)
- AcquisitionFramePeriod
-
+
+ 0x300
+ 8
+ RW
+ Device
+ LittleEndian
+
-
- AcquisitionFramePeriodRegister
- 1000
- 10000000
-
+
+ 0x308
+ 8
+ RW
+ Device
+ LittleEndian
+
-
- 0x138
- 4
+
+ 0x310
+ 8
RW
Device
- Unsigned
- BigEndian
-
+ LittleEndian
+
@@ -330,7 +246,7 @@
RW
Device
Unsigned
- BigEndian
+ LittleEndian
@@ -348,7 +264,7 @@
RW
Device
Unsigned
- BigEndian
+ LittleEndian
@@ -366,75 +282,109 @@
RW
Device
Unsigned
- BigEndian
+ LittleEndian
Exposure duration, in microseconds.
ExposureTimeAbsConverter
- 10.0
- 10000000.0
+ ExposureMinRegister
+ ExposureMaxRegister
+ ExposureAvailable
- FROM
- TO
+ FROM/100
+ TO*100
ExposureTimeAbsRegister
- 0x120
+ 0x400
4
RW
Device
Unsigned
- BigEndian
+ LittleEndian
+
+
+
+ 0x404
+ 4
+ RO
+ Device
+ Unsigned
+ LittleEndian
+
+
+
+ 0x408
+ 4
+ RO
+ Device
+ Unsigned
+ LittleEndian
+
+
+
+ Automatic exposure
+
+ 0
+
+
+ 2
+
+ ExposureAutoRegister
+
+
+
+ 0x40C
+ 4
+ RW
+ Device
+ Unsigned
+ LittleEndian
- GainRaw
- GainAuto
+ Gain
-
- Raw gain.
- GainRawRegister
- 0
- 10
-
+
+ Gain
+ GainRegister
+ GainMinRegister
+ GainMaxRegister
+ GainAvailable
+
-
- 0x110
+
+ 0x200
4
RW
Device
Unsigned
- BigEndian
+ LittleEndian
-
- Automatic gain mode.
-
- 1
-
-
- 3
-
-
- 2
-
- GainAutoRegister
-
+
+ 0x204
+ 4
+ RO
+ Device
+ Unsigned
+ LittleEndian
+
-
- 0x114
+
+ 0x208
4
- RW
+ RO
Device
Unsigned
- BigEndian
+ LittleEndian
@@ -443,12 +393,14 @@
PayloadSize
-
- Width
- Height
- PixelFormat
- WIDTH * HEIGHT * ((PIXELFORMAT>>16)&0xFF) / 8
-
+
+ 0x118
+ 4
+ RO
+ Device
+ Unsigned
+ LittleEndian
+
Indicates whether a live grab is under way
diff --git a/src/arvcameratest.c b/src/arvcameratest.c
index cd12b4a00..cde8fa870 100644
--- a/src/arvcameratest.c
+++ b/src/arvcameratest.c
@@ -5,6 +5,8 @@
#include
#include
+#define N_BUFFERS 5
+
static char *arv_option_camera_name = NULL;
static char *arv_option_debug_domains = NULL;
static char *arv_option_trigger = NULL;
@@ -41,6 +43,7 @@ static char *arv_option_uv_usb_mode = NULL;
static gboolean arv_option_show_version = FALSE;
static gboolean arv_option_gv_allow_broadcast_discovery_ack = FALSE;
static char *arv_option_gv_port_range = NULL;
+static gboolean arv_option_native_buffers = FALSE;
/* clang-format off */
static const GOptionEntry arv_option_entries[] =
@@ -178,7 +181,7 @@ static const GOptionEntry arv_option_entries[] =
},
{
"multipart", '\0', 0, G_OPTION_ARG_NONE,
- &arv_option_multipart, "Enable multipart payload",
+ &arv_option_multipart, "Enable multipart payload",
NULL
},
{
@@ -217,6 +220,11 @@ static const GOptionEntry arv_option_entries[] =
&arv_option_gv_port_range, "GV port range",
"-"
},
+ {
+ "native-buffers", '\0', 0, G_OPTION_ARG_NONE,
+ &arv_option_native_buffers, "Enable native buffers",
+ NULL
+ },
{
"debug", 'd', 0, G_OPTION_ARG_STRING,
&arv_option_debug_domains, "Debug output selection",
@@ -684,7 +692,12 @@ main (int argc, char **argv)
NULL);
}
- arv_stream_create_buffers(stream, 50, NULL, NULL, NULL);
+ if (arv_option_native_buffers)
+ arv_stream_create_buffers(stream, N_BUFFERS, NULL, NULL, NULL);
+ else {
+ for (i = 0; i < N_BUFFERS; i++)
+ arv_stream_push_buffer (stream, arv_buffer_new_allocate (payload));
+ }
arv_camera_set_acquisition_mode (camera, ARV_ACQUISITION_MODE_CONTINUOUS, NULL);
diff --git a/src/arvdebugprivate.h b/src/arvdebugprivate.h
index ce31f5e92..2d5c3b8ff 100644
--- a/src/arvdebugprivate.h
+++ b/src/arvdebugprivate.h
@@ -85,6 +85,7 @@ extern ArvDebugCategoryInfos arv_debug_category_infos[];
#define arv_warning_stream_thread(...) arv_warning (ARV_DEBUG_CATEGORY_STREAM_THREAD, __VA_ARGS__)
#define arv_info_stream_thread(...) arv_info (ARV_DEBUG_CATEGORY_STREAM_THREAD, __VA_ARGS__)
#define arv_debug_stream_thread(...) arv_debug (ARV_DEBUG_CATEGORY_STREAM_THREAD, __VA_ARGS__)
+#define arv_trace_stream_thread(...) arv_trace (ARV_DEBUG_CATEGORY_STREAM_THREAD, __VA_ARGS__)
#define arv_warning_cp(...) arv_warning (ARV_DEBUG_CATEGORY_CP, __VA_ARGS__)
#define arv_info_cp(...) arv_info (ARV_DEBUG_CATEGORY_CP, __VA_ARGS__)
diff --git a/src/arvmisc.c b/src/arvmisc.c
index 11bb67607..c162244d5 100644
--- a/src/arvmisc.c
+++ b/src/arvmisc.c
@@ -745,6 +745,13 @@ ArvGstCapsInfos arv_gst_caps_infos[] = {
"video/x-raw-rgb, format=(string)RGB, bpp=(int)24, depth=(int)8",
"video/x-raw-rgb", 24, 24, 0
},
+ {
+ ARV_PIXEL_FORMAT_BGR_8_PACKED,
+ "video/x-raw, format=(string)BGR",
+ "video/x-raw", "BGR",
+ "video/x-raw-rgb, format=(string)BGR, bpp=(int)24, depth=(int)8",
+ "video/x-raw-rgb", 24, 24, 0
+ },
{
ARV_PIXEL_FORMAT_RGBA_8_PACKED,
"video/x-raw, format=(string)RGBA",
diff --git a/src/arvstream.c b/src/arvstream.c
index d0178d2b2..65ae8bb9f 100644
--- a/src/arvstream.c
+++ b/src/arvstream.c
@@ -300,13 +300,26 @@ gboolean
arv_stream_start_acquisition (ArvStream *stream, GError **error)
{
ArvStreamClass *stream_class;
+ GError *local_error = NULL;
+ gboolean success;
g_return_val_if_fail (ARV_IS_STREAM (stream), FALSE);
stream_class = ARV_STREAM_GET_CLASS (stream);
g_return_val_if_fail (stream_class->start_acquisition != NULL, FALSE);
- return stream_class->start_acquisition (stream, error);
+ success = stream_class->start_acquisition (stream, &local_error);
+ if (!success) {
+ if (local_error != NULL)
+ arv_warning_stream ("Failed to start stream acquisition (%s)", local_error->message);
+ else
+ arv_warning_stream ("Failed to start stream acquisition");
+ }
+
+ if (local_error != NULL)
+ g_propagate_error(error, local_error);
+
+ return success;
}
/**
@@ -338,6 +351,8 @@ arv_stream_stop_acquisition (ArvStream *stream, GError **error)
if (success && priv->n_buffer_filling != 0) {
g_critical ("Buffer filling count must be 0 after acquisition stop (was %d)", priv->n_buffer_filling);
}
+ if (!success)
+ arv_warning_stream ("Failed to stop stream acquisition ");
return success;
}
@@ -375,15 +390,15 @@ arv_stream_delete_buffers (ArvStream *stream)
do {
buffer = g_async_queue_try_pop_unlocked (priv->input_queue);
- if (buffer != NULL) {
+ if (ARV_IS_BUFFER(buffer)) {
g_object_unref (buffer);
n_deleted++;
- }
+ }
} while (buffer != NULL);
do {
buffer = g_async_queue_try_pop_unlocked (priv->output_queue);
- if (buffer != NULL) {
+ if (ARV_IS_BUFFER(buffer)) {
g_object_unref (buffer);
n_deleted++;
}
@@ -714,6 +729,7 @@ arv_stream_create_buffers (ArvStream *stream, unsigned int n_buffers,
{
ArvStreamClass *stream_class;
ArvStreamPrivate *priv = arv_stream_get_instance_private (stream);
+ gboolean success;
size_t payload_size;
unsigned int i;
@@ -726,9 +742,24 @@ arv_stream_create_buffers (ArvStream *stream, unsigned int n_buffers,
return FALSE;
stream_class = ARV_STREAM_GET_CLASS (stream);
- if (stream_class->create_buffers != NULL)
- return stream_class->create_buffers (stream, n_buffers, payload_size,
- user_data, user_data_destroy_func, error);
+ if (stream_class->create_buffers != NULL) {
+ GError *local_error = NULL;
+
+ success = stream_class->create_buffers (stream, n_buffers, payload_size,
+ user_data, user_data_destroy_func, &local_error);
+ if (!success) {
+ if (local_error != NULL) {
+ arv_warning_stream ("Failed to create native buffers: %s",
+ local_error->message);
+ g_propagate_error(error, local_error);
+ } else {
+ arv_warning_stream ("Failed to create native buffers");
+ }
+ }
+
+ return success;
+ }
for (i = 0; i < n_buffers; i++)
arv_stream_push_buffer (stream, arv_buffer_new_full (payload_size, NULL,
diff --git a/src/arvtest.c b/src/arvtest.c
index c3576c2ec..d4ef64d47 100644
--- a/src/arvtest.c
+++ b/src/arvtest.c
@@ -694,6 +694,9 @@ _multiple_acquisition (ArvTest *test, const char *test_name, ArvTestCamera *test
gint64 start_time = -1;
gint64 end_time = -1;
gboolean frame_rate_success;
+ gboolean callback_init_success;
+ gboolean callback_buffer_success;
+ gboolean callback_exit_success;
guint n_completed_buffers = 0;
guint n_expected_buffers = 10;
@@ -752,13 +755,18 @@ _multiple_acquisition (ArvTest *test, const char *test_name, ArvTestCamera *test
g_clear_object (&stream);
- success = success &&
- callback_data.n_init == 1 &&
+ callback_init_success = callback_data.n_init == 1;
+ callback_buffer_success =
callback_data.n_start == callback_data.n_done &&
- callback_data.n_success >= n_expected_buffers &&
- callback_data.n_exit == 1;
+ callback_data.n_success >= n_expected_buffers;
+ callback_exit_success = callback_data.n_exit == 1;
- message = g_strdup_printf ("%u/%u%s%s", n_completed_buffers, n_expected_buffers,
+ success = success && callback_init_success && callback_buffer_success && callback_exit_success;
+
+ message = g_strdup_printf ("%u/%u%s%s%s%s%s", n_completed_buffers, n_expected_buffers,
+ callback_init_success ? "" : " cb_init_err",
+ callback_buffer_success ? "" : " cb_buffer_err",
+ callback_exit_success ? "" : " cb_exit_err",
error != NULL ? " " : "",
error != NULL ? error->message : "");
arv_test_camera_add_result (test_camera, test_name, "BufferCheck",
@@ -777,6 +785,8 @@ _multiple_acquisition (ArvTest *test, const char *test_name, ArvTestCamera *test
message = g_strdup_printf ("%.2f Hz", actual_frame_rate);
} else
message = g_strdup_printf ("%.2f Hz (expected:%.2f Hz)", actual_frame_rate, frame_rate);
+ } else {
+ message = g_strdup_printf ("Missing timestamp information");
}
arv_test_camera_add_result (test_camera, test_name, "FrameRate",
diff --git a/src/arvuvstream.c b/src/arvuvstream.c
index 5619215ee..043868c41 100644
--- a/src/arvuvstream.c
+++ b/src/arvuvstream.c
@@ -238,7 +238,6 @@ arv_uv_stream_payload_cb (struct libusb_transfer *transfer)
switch (transfer->status) {
case LIBUSB_TRANSFER_COMPLETED:
ctx->total_payload_transferred += transfer->actual_length;
-
if (ctx->buffer->priv->payload_type == ARV_BUFFER_PAYLOAD_TYPE_GENDC_CONTAINER){
if(!arv_uvsp_packet_is_gendc (ctx->buffer->priv->data)){
arv_warning_sp ("Invalid GenDC Container: Signature shows %.4s "
@@ -796,7 +795,6 @@ arv_uv_stream_thread_sync (void *data)
buffer->priv->parts[0].y_offset = 0;
buffer->priv->parts[0].x_padding = arv_uvsp_packet_get_gendc_partpadding_x(buffer->priv->data + partoffset);
buffer->priv->parts[0].y_padding = arv_uvsp_packet_get_gendc_partpadding_y(buffer->priv->data + partoffset);
-
break;
}
}
diff --git a/src/arvv4l2device.c b/src/arvv4l2device.c
index 0c99ae8ef..64d55bd52 100644
--- a/src/arvv4l2device.c
+++ b/src/arvv4l2device.c
@@ -1,6 +1,6 @@
/* Aravis - Digital camera library
*
- * Copyright © 2009-2019 Emmanuel Pacaud
+ * Copyright © 2009-2023 Emmanuel Pacaud
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@@ -28,16 +28,40 @@
#include
#include
#include
-#include
-#include
+#include
+#include
+#include
#include
#include
-
-#define ARV_V4L2_ADDRESS_DEVICE_VENDOR_NAME 0x48
-#define ARV_V4L2_ADDRESS_DEVICE_MODEL_NAME 0x68
-#define ARV_V4L2_ADDRESS_DEVICE_VERSION 0x88
-#define ARV_V4L2_ADDRESS_DEVICE_MANUFACTURER_INFO 0xa8
-#define ARV_V4L2_ADDRESS_DEVICE_ID 0xd8
+#include
+#include
+#include
+#include
+
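+/* Virtual register map exposed to the Genicam layer (arv-v4l2.xml); reads and
+ * writes at these addresses are handled by arv_v4l2_device_read_memory() and
+ * arv_v4l2_device_write_memory() below. */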
+#define ARV_V4L2_ADDRESS_DEVICE_VENDOR_NAME 0x0048
+#define ARV_V4L2_ADDRESS_DEVICE_MODEL_NAME 0x0068
+#define ARV_V4L2_ADDRESS_DEVICE_VERSION 0x0088
+#define ARV_V4L2_ADDRESS_DEVICE_MANUFACTURER_INFO 0x00a8
+#define ARV_V4L2_ADDRESS_DEVICE_ID 0x00d8
+
+#define ARV_V4L2_ADDRESS_WIDTH 0x0100
+#define ARV_V4L2_ADDRESS_HEIGHT 0x0104
+#define ARV_V4L2_ADDRESS_PAYLOAD_SIZE 0x0118
+#define ARV_V4L2_ADDRESS_ACQUISITION_COMMAND 0x0124
+#define ARV_V4L2_ADDRESS_PIXEL_FORMAT 0x0128
+
+#define ARV_V4L2_ADDRESS_GAIN 0x0200
+#define ARV_V4L2_ADDRESS_GAIN_MIN 0x0204
+#define ARV_V4L2_ADDRESS_GAIN_MAX 0x0208
+
+#define ARV_V4L2_ADDRESS_FRAME_RATE 0x0300
+#define ARV_V4L2_ADDRESS_FRAME_RATE_MIN 0x0308
+#define ARV_V4L2_ADDRESS_FRAME_RATE_MAX 0x0310
+
+#define ARV_V4L2_ADDRESS_EXPOSURE_TIME 0x0400
+#define ARV_V4L2_ADDRESS_EXPOSURE_MIN 0x0404
+#define ARV_V4L2_ADDRESS_EXPOSURE_MAX 0x0408
+#define ARV_V4L2_ADDRESS_EXPOSURE_AUTO 0x040C
enum
{
@@ -53,6 +77,23 @@ typedef struct {
char *device_version;
char *device_driver;
+ guint sensor_width;
+ guint sensor_height;
+
+ gboolean gain_available;
+ gint32 gain_min;
+ gint32 gain_max;
+
+ gboolean exposure_available;
+ gint32 exposure_min;
+ gint32 exposure_max;
+ gint32 exposure_manual_index;
+ gint32 exposure_auto_index;
+
+ gint pixel_format_idx;
+ GArray *pixel_formats;
+ GArray *frame_sizes;
+
char *genicam_xml;
size_t genicam_xml_size;
@@ -96,33 +137,372 @@ arv_v4l2_device_get_genicam (ArvDevice *device)
return priv->genicam;
}
+gboolean
+arv_v4l2_device_set_image_format (ArvV4l2Device *device)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ struct v4l2_format format = {0};
+ struct v4l2_requestbuffers req = {0};
+ struct v4l2_frmsizeenum *frame_size;
+ ArvPixelFormat arv_pixel_format;
+
+ req.count = 0;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+ if (arv_v4l2_ioctl(priv->device_fd, VIDIOC_REQBUFS, &req) == -1) {
+ arv_warning_device ("Failed to release all v4l2 buffers (%s)", strerror(errno));
+ return FALSE;
+ }
+
+ frame_size = &g_array_index (priv->frame_sizes,
+ struct v4l2_frmsizeenum,
+ priv->pixel_format_idx);
+
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.fmt.pix.width = frame_size->type == V4L2_FRMSIZE_TYPE_DISCRETE ?
+ frame_size->discrete.width :
+ frame_size->stepwise.max_width;
+ format.fmt.pix.height = frame_size->type == V4L2_FRMSIZE_TYPE_DISCRETE ?
+ frame_size->discrete.height :
+ frame_size->stepwise.max_height;
+
+ arv_pixel_format = g_array_index (priv->pixel_formats, guint32, priv->pixel_format_idx);
+
+ format.fmt.pix.pixelformat = arv_pixel_format_to_v4l2(arv_pixel_format);
+ if (format.fmt.pix.pixelformat == 0) {
+ arv_warning_device ("Unknown 0x%08x pixel format", arv_pixel_format);
+ return FALSE;
+ }
+
+ format.fmt.pix.field = V4L2_FIELD_NONE;
+
+ arv_info_device ("Set format to %d×%d %s",
+ format.fmt.pix.width,
+ format.fmt.pix.height,
+ arv_pixel_format_to_gst_caps_string(arv_pixel_format));
+
+ if (arv_v4l2_ioctl(priv->device_fd, VIDIOC_S_FMT, &format) == -1) {
+ arv_warning_device ("Failed to select v4l2 format (%s)", strerror(errno));
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+gboolean
+arv_v4l2_device_get_image_format (ArvV4l2Device *device,
+ guint32 *payload_size,
+ ArvPixelFormat *pixel_format,
+ guint32 *width,
+ guint32 *height,
+ guint32 *bytes_per_line)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ struct v4l2_format format = {0};
+ ArvPixelFormat arv_pixel_format;
+
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (arv_v4l2_ioctl (priv->device_fd, VIDIOC_G_FMT, &format) == -1) {
+ arv_warning_device ("Failed to retrieve v4l2 format (%s)", strerror(errno));
+ return FALSE;
+ }
+
+ arv_pixel_format = arv_pixel_format_from_v4l2(format.fmt.pix.pixelformat);
+ if (arv_pixel_format == 0) {
+ arv_warning_device ("Uknown v4l2 pixel format (%d)", format.fmt.pix.pixelformat);
+ return FALSE;
+ }
+
+ if (payload_size != NULL)
+ *payload_size = format.fmt.pix.sizeimage;
+ if (pixel_format != NULL)
+ *pixel_format = arv_pixel_format;
+ if (width != NULL)
+ *width = format.fmt.pix.width;
+ if (height != NULL)
+ *height = format.fmt.pix.height;
+ if (bytes_per_line != NULL)
+ *bytes_per_line = format.fmt.pix.bytesperline;
+
+ arv_info_device ("Current format %d×%d %s %d bytes, %d bytes per line",
+ format.fmt.pix.width,
+ format.fmt.pix.height,
+ arv_pixel_format_to_gst_caps_string(arv_pixel_format),
+ format.fmt.pix.sizeimage,
+ format.fmt.pix.bytesperline);
+
+ return TRUE;
+}
+
+static gboolean
+arv_v4l2_device_get_frame_rate_bounds (ArvV4l2Device *device, double *framerate_min, double *framerate_max)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ ArvPixelFormat arv_pixel_format;
+ double fr_min = 0, fr_max = 0;
+ struct v4l2_format format = {0};
+ unsigned int i;
+
+ if (framerate_min != NULL)
+ *framerate_min = 0.0;
+ if (framerate_max != NULL)
+ *framerate_max = 0.0;
+
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (arv_v4l2_ioctl (priv->device_fd, VIDIOC_G_FMT, &format) == -1) {
+ arv_warning_device ("Failed to retrieve v4l2 format (%s)", strerror(errno));
+ return FALSE;
+ }
+
+ arv_pixel_format = arv_pixel_format_from_v4l2(format.fmt.pix.pixelformat);
+ if (arv_pixel_format == 0) {
+ arv_warning_device ("Uknown v4l2 pixel format (%d)", format.fmt.pix.pixelformat);
+ return FALSE;
+ }
+
+ for (i = 0; TRUE; i++) {
+ struct v4l2_frmivalenum frmivalenum = {0};
+
+ frmivalenum.index = i;
+ frmivalenum.pixel_format = format.fmt.pix.pixelformat;
+ frmivalenum.width = format.fmt.pix.width;
+ frmivalenum.height = format.fmt.pix.height;
+ if (arv_v4l2_ioctl (priv->device_fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmivalenum) == -1) {
+ if (i == 0) {
+ arv_warning_device ("Can't find frame rate");
+ return FALSE;
+ }
+ break;
+ }
+
+ if (frmivalenum.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ double value =
+ (double) frmivalenum.discrete.denominator /
+ (double) frmivalenum.discrete.numerator;
+
+ if (i == 0) {
+ fr_max = fr_min = value;
+ } else {
+ if (value < fr_min)
+ fr_min = value;
+ if (value > fr_max)
+ fr_max = value;
+ }
+ } else if (frmivalenum.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
+ frmivalenum.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ /* stepwise.min is the shortest frame interval (highest rate),
+ * stepwise.max the longest interval (lowest rate) */
+ fr_max =
+ (double) frmivalenum.stepwise.min.denominator /
+ (double) frmivalenum.stepwise.min.numerator;
+ fr_min =
+ (double) frmivalenum.stepwise.max.denominator /
+ (double) frmivalenum.stepwise.max.numerator;
+ break;
+ } else {
+ if (i == 0) {
+ arv_warning_device ("Can't find frame rate");
+ return FALSE;
+ }
+ break;
+ }
+ }
+
+ if (framerate_min != NULL)
+ *framerate_min = fr_min;
+ if (framerate_max != NULL)
+ *framerate_max = fr_max;
+
+ return TRUE;
+}
+
+static double
+_get_frame_rate (ArvV4l2Device *device)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ struct v4l2_streamparm streamparm = {0};
+
+ streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (ioctl (priv->device_fd, VIDIOC_G_PARM, &streamparm) == -1) {
+ arv_warning_device ("Failed to set frame rate");
+ return 0.0;
+ }
+
+ return
+ (double) streamparm.parm.capture.timeperframe.denominator /
+ (double) streamparm.parm.capture.timeperframe.numerator;
+}
+
+static gboolean
+_set_frame_rate (ArvV4l2Device *device, double frame_rate)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ struct v4l2_streamparm streamparm = {0};
+
+ streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
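+ /* timeperframe is a frame interval (numerator/denominator seconds), so a
+ * numerator of 1000000 and a denominator of 1000000 * frame_rate yields a
+ * period of 1/frame_rate seconds. */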
+ streamparm.parm.capture.timeperframe.numerator = 1000000.0;
+ streamparm.parm.capture.timeperframe.denominator = 1000000.0 * frame_rate;
+
+ if (ioctl (priv->device_fd, VIDIOC_S_PARM, &streamparm) == -1) {
+ arv_warning_device ("Failed to set frame rate");
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static void
+_control_stream (ArvV4l2Device *device, gboolean enable)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (arv_v4l2_ioctl (priv->device_fd, enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type) == -1) {
+ arv_warning_device ("v4l2 stream %s failed (%s)",
+ enable ? "start" : "stop",
+ strerror (errno));
+ } else {
+ arv_info_device ("Stream %s for device '%s'", enable ? "started" : "stopped", priv->device_file);
+ }
+}
+
static gboolean
arv_v4l2_device_read_memory (ArvDevice *device, guint64 address, guint32 size, void *buffer, GError **error)
{
- ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ ArvV4l2Device *v4l2_device = ARV_V4L2_DEVICE(device);
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ gboolean found = TRUE;
if (size < 1 || buffer == NULL)
return FALSE;
- if (address == ARV_V4L2_ADDRESS_DEVICE_VENDOR_NAME) {
- strncpy (buffer, priv->device_driver, size - 1);
+ if (address == ARV_V4L2_ADDRESS_DEVICE_VENDOR_NAME) {
+ strncpy (buffer, priv->device_driver, size - 1);
((char *) buffer)[size - 1] = '\0';
- } else if (address == ARV_V4L2_ADDRESS_DEVICE_MODEL_NAME) {
- strncpy (buffer, priv->device_card, size - 1);
+ } else if (address == ARV_V4L2_ADDRESS_DEVICE_MODEL_NAME) {
+ strncpy (buffer, priv->device_card, size - 1);
((char *) buffer)[size - 1] = '\0';
- } else if (address == ARV_V4L2_ADDRESS_DEVICE_VERSION) {
- strncpy (buffer, priv->device_version, size - 1);
+ } else if (address == ARV_V4L2_ADDRESS_DEVICE_VERSION) {
+ strncpy (buffer, priv->device_version, size - 1);
((char *) buffer)[size - 1] = '\0';
- } else if (address == ARV_V4L2_ADDRESS_DEVICE_MANUFACTURER_INFO) {
- strncpy (buffer, "Aravis", size - 1);
+ } else if (address == ARV_V4L2_ADDRESS_DEVICE_MANUFACTURER_INFO) {
+ strncpy (buffer, "Aravis", size - 1);
((char *) buffer)[size - 1] = '\0';
- } else if (address == ARV_V4L2_ADDRESS_DEVICE_ID) {
- strncpy (buffer, priv->device_file, size - 1);
+ } else if (address == ARV_V4L2_ADDRESS_DEVICE_ID) {
+ strncpy (buffer, priv->device_file, size - 1);
((char *) buffer)[size - 1] = '\0';
- } else {
- /* TODO set error */
- return FALSE;
- }
+ } else {
+ if (priv->pixel_format_idx < priv->frame_sizes->len &&
+ priv->pixel_format_idx < priv->pixel_formats->len) {
+ struct v4l2_frmsizeenum *frame_size;
+
+ frame_size = &g_array_index (priv->frame_sizes,
+ struct v4l2_frmsizeenum,
+ priv->pixel_format_idx);
+
+ if (size == 4) {
+ union {
+ gint32 i32;
+ float f;
+ } value;
+
+ g_assert (sizeof (value.i32) == 4);
+ g_assert (sizeof (value.f) == 4);
+
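+ /* The Genicam description maps each feature to a 4-byte pseudo-register;
+ * the union lets integer- and float-valued registers share the same copy path. */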
+ switch (address) {
+ case ARV_V4L2_ADDRESS_WIDTH:
+ value.i32 = frame_size->type == V4L2_FRMSIZE_TYPE_DISCRETE ?
+ frame_size->discrete.width :
+ frame_size->stepwise.max_width;
+ break;
+ case ARV_V4L2_ADDRESS_HEIGHT:
+ value.i32 = frame_size->type == V4L2_FRMSIZE_TYPE_DISCRETE ?
+ frame_size->discrete.height :
+ frame_size->stepwise.max_height;
+ break;
+ case ARV_V4L2_ADDRESS_GAIN:
+ value.i32 = arv_v4l2_get_ctrl (priv->device_fd, V4L2_CID_GAIN);
+ break;
+ case ARV_V4L2_ADDRESS_GAIN_MIN:
+ value.i32 = priv->gain_min;
+ break;
+ case ARV_V4L2_ADDRESS_GAIN_MAX:
+ value.i32 = priv->gain_max;
+ break;
+ case ARV_V4L2_ADDRESS_EXPOSURE_TIME:
+ value.i32 = arv_v4l2_get_int32_ext_ctrl(priv->device_fd,
+ V4L2_CTRL_CLASS_CAMERA,
+ V4L2_CID_EXPOSURE_ABSOLUTE);
+ break;
+ case ARV_V4L2_ADDRESS_EXPOSURE_MIN:
+ value.i32 = priv->exposure_min;
+ break;
+ case ARV_V4L2_ADDRESS_EXPOSURE_MAX:
+ value.i32 = priv->exposure_max;
+ break;
+ case ARV_V4L2_ADDRESS_EXPOSURE_AUTO:
+ value.i32 = arv_v4l2_get_int32_ext_ctrl
+ (priv->device_fd,
+ V4L2_CTRL_CLASS_CAMERA,
+ V4L2_CID_EXPOSURE_AUTO) == priv->exposure_auto_index ?
+ ARV_AUTO_CONTINUOUS : ARV_AUTO_OFF;
+ break;
+ case ARV_V4L2_ADDRESS_PAYLOAD_SIZE:
+ arv_v4l2_device_set_image_format (v4l2_device);
+ arv_v4l2_device_get_image_format (v4l2_device, (guint32 *) &value,
+ NULL, NULL, NULL, NULL);
+ break;
+ case ARV_V4L2_ADDRESS_PIXEL_FORMAT:
+ value.i32 = g_array_index (priv->pixel_formats, guint32,
+ priv->pixel_format_idx);
+ break;
+ default:
+ found = FALSE;
+ }
+
+ if (found) {
+ memcpy (buffer, &value, sizeof (value));
+ }
+ } else if (size == 8) {
+ union {
+ gint64 i64;
+ double d;
+ } value;
+
+ g_assert (sizeof (value.i64) == 8);
+ g_assert (sizeof (value.d) == 8);
+
+ switch (address) {
+ case ARV_V4L2_ADDRESS_FRAME_RATE:
+ value.d = _get_frame_rate(v4l2_device);
+ break;
+ case ARV_V4L2_ADDRESS_FRAME_RATE_MIN:
+ arv_v4l2_device_get_frame_rate_bounds(v4l2_device, &value.d, NULL);
+ break;
+ case ARV_V4L2_ADDRESS_FRAME_RATE_MAX:
+ arv_v4l2_device_get_frame_rate_bounds(v4l2_device, NULL, &value.d);
+ break;
+ default:
+ found = FALSE;
+ }
+
+ if (found) {
+ memcpy (buffer, &value, sizeof (value));
+ }
+ } else {
+ found = FALSE;
+ }
+ } else {
+ found = FALSE;
+ }
+ }
+
+ if (!found) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR_INVALID_ADDRESS,
+ "Invalid address (0x%08" G_GINT64_MODIFIER "x)", address);
+ return FALSE;
+ }
return TRUE;
}
@@ -130,8 +510,79 @@ arv_v4l2_device_read_memory (ArvDevice *device, guint64 address, guint32 size, v
static gboolean
arv_v4l2_device_write_memory (ArvDevice *device, guint64 address, guint32 size, const void *buffer, GError **error)
{
- /* TODO set error */
- return FALSE;
+ ArvV4l2Device *v4l2_device = ARV_V4L2_DEVICE(device);
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (device));
+ gboolean found = TRUE;
+ gint i;
+
+ if (size == 4) {
+ union {
+ gint32 i32;
+ float f;
+ } value;
+
+ g_assert (sizeof (value.i32) == 4);
+ g_assert (sizeof (value.f) == 4);
+
+ memcpy (&value, buffer, sizeof (value));
+ switch (address) {
+ case ARV_V4L2_ADDRESS_ACQUISITION_COMMAND:
+ _control_stream (v4l2_device, value.i32 != 0);
+ break;
+ case ARV_V4L2_ADDRESS_GAIN:
+ arv_v4l2_set_ctrl (priv->device_fd, V4L2_CID_GAIN, value.i32);
+ break;
+ case ARV_V4L2_ADDRESS_EXPOSURE_TIME:
+ arv_v4l2_set_int32_ext_ctrl (priv->device_fd,
+ V4L2_CTRL_CLASS_CAMERA,
+ V4L2_CID_EXPOSURE_ABSOLUTE,
+ value.i32);
+ break;
+ case ARV_V4L2_ADDRESS_EXPOSURE_AUTO:
+ arv_v4l2_set_int32_ext_ctrl (priv->device_fd, V4L2_CTRL_CLASS_CAMERA,
+ V4L2_CID_EXPOSURE_AUTO, value.i32 == ARV_AUTO_OFF ?
+ priv->exposure_manual_index :
+ priv->exposure_auto_index);
+ break;
+ case ARV_V4L2_ADDRESS_PIXEL_FORMAT:
+ for (i = 0; i < priv->pixel_formats->len; i++) {
+ if (g_array_index(priv->pixel_formats, guint32, i) == value.i32) {
+ priv->pixel_format_idx = i;
+ break;
+ }
+ }
+ if (i == priv->pixel_formats->len)
+ found = FALSE;
+ break;
+ default:
+ found = FALSE;
+ }
+ } else if (size == 8) {
+ union {
+ gint64 i64;
+ double d;
+ } value;
+
+ g_assert (sizeof (value.i64) == 8);
+ g_assert (sizeof (value.d) == 8);
+
+ memcpy (&value, buffer, sizeof (value));
+ switch (address) {
+ case ARV_V4L2_ADDRESS_FRAME_RATE:
+ _set_frame_rate(v4l2_device, value.d);
+ break;
+ default:
+ found = FALSE;
+ }
+ } else {
+ found = FALSE;
+ }
+
+ if (!found) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR_INVALID_ADDRESS,
+ "Invalid address (0x%08" G_GINT64_MODIFIER "x)", address);
+ return FALSE;
+ }
+
+ return TRUE;
}
static gboolean
@@ -146,15 +597,42 @@ arv_v4l2_device_write_register (ArvDevice *device, guint64 address, guint32 valu
return arv_v4l2_device_write_memory (device, address, sizeof (guint32), &value, error);
}
+int
+arv_v4l2_device_get_fd (ArvV4l2Device *v4l2_device)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (v4l2_device);
+
+ g_return_val_if_fail(ARV_IS_V4L2_DEVICE(v4l2_device), 0);
+
+ return priv->device_fd;
+}
+
/**
* arv_v4l2_device_new:
* @error: a #GError placeholder, %NULL to ignore
*
* Returns: a newly created #ArvDevice connected to a v4l2 device
*
- * Since: 0.8.7
+ * Since: 0.10.0
*/
+static void
+arv_v4l2_device_set_property (GObject *self, guint prop_id, const GValue *value, GParamSpec *pspec)
+{
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (self));
+
+ switch (prop_id)
+ {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ break;
+ case PROP_V4L2_DEVICE_DEVICE_FILE:
+ g_free (priv->device_file);
+ priv->device_file = g_value_dup_string (value);
+ break;
+ }
+}
+
ArvDevice *
arv_v4l2_device_new (const char *device_file, GError **error)
{
@@ -171,35 +649,35 @@ arv_v4l2_device_init (ArvV4l2Device *v4l2_device)
priv->device_fd = -1;
}
-static void
-arv_v4l2_device_finalize (GObject *object)
-{
- ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (object));
-
- if (priv->device_fd != -1)
- v4l2_close (priv->device_fd);
-
- g_clear_object (&priv->genicam);
- g_clear_pointer (&priv->genicam_xml, g_free);
- g_clear_pointer (&priv->device_file, g_free);
- g_clear_pointer (&priv->device_version, g_free);
- g_clear_pointer (&priv->device_driver, g_free);
- g_clear_pointer (&priv->device_card, g_free);
-
- G_OBJECT_CLASS (arv_v4l2_device_parent_class)->finalize (object);
-}
-
static void
arv_v4l2_device_constructed (GObject *self)
{
ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (self));
- struct v4l2_capability cap;
+ GString *format_feature;
+ char *feature;
+ struct v4l2_capability cap = {0};
+ struct v4l2_cropcap crop_cap = {0};
GBytes *bytes;
GError *error = NULL;
+ struct stat st;
+ int i;
+
+ if (stat(priv->device_file, &st) == -1) {
+ arv_device_take_init_error (ARV_DEVICE (self),
+ g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
+ "Can't identify '%s' (%s)\n",
+ priv->device_file, strerror(errno)));
+ return;
+ }
- /* TODO errors */
+ if (!S_ISCHR(st.st_mode)) {
+ arv_device_take_init_error (ARV_DEVICE (self),
+ g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
+ "'%s' is no device\n", priv->device_file));
+ return;
+ }
- priv->device_fd = v4l2_open (priv->device_file, O_RDWR);
+ priv->device_fd = open (priv->device_file, O_RDWR | O_NONBLOCK, 0);
if (priv->device_fd == -1) {
arv_device_take_init_error (ARV_DEVICE (self),
g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
@@ -207,13 +685,64 @@ arv_v4l2_device_constructed (GObject *self)
return;
}
- if (v4l2_ioctl (priv->device_fd, VIDIOC_QUERYCAP, &cap) == -1) {
+ if (arv_v4l2_ioctl (priv->device_fd, VIDIOC_QUERYCAP, &cap) == -1) {
arv_device_take_init_error (ARV_DEVICE (self),
g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
"Device '%s' is not a V4L2 device", priv->device_file));
return;
}
+ if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+ arv_device_take_init_error (ARV_DEVICE (self),
+ g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
+ "Device '%s' is not video capture device",
+ priv->device_file));
+ return;
+ }
+
+#if 0
+ if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) != 0) {
+ arv_device_take_init_error (ARV_DEVICE (self),
+ g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
+ "Multiplanar capture of device '%s' is not supported",
+ priv->device_file));
+ return;
+ }
+#endif
+
+ if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
+ arv_device_take_init_error (ARV_DEVICE (self),
+ g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
+ "Device '%s' does not support streaming",
+ priv->device_file));
+ return;
+ }
+
+#if 0
+ if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
+ arv_device_take_init_error (ARV_DEVICE (self),
+ g_error_new (ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_NOT_FOUND,
+ "Device '%s' does not support read",
+ priv->device_file));
+ return;
+ }
+#endif
+
+ crop_cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ /* Reset cropping region */
+ if (arv_v4l2_ioctl (priv->device_fd, VIDIOC_CROPCAP, &crop_cap) == 0) {
+ struct v4l2_crop crop = {0};
+
+ crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ crop.c = crop_cap.defrect; /* reset to default */
+
+ arv_v4l2_ioctl (priv->device_fd, VIDIOC_S_CROP, &crop);
+ }
+
+ priv->sensor_width = crop_cap.bounds.width;
+ priv->sensor_height = crop_cap.bounds.height;
+
priv->device_card = g_strdup ((char *) cap.card);
priv->device_driver = g_strdup ((char *) cap.driver);
priv->device_version = g_strdup_printf ("%d.%d.%d",
@@ -243,23 +772,179 @@ arv_v4l2_device_constructed (GObject *self)
"Invalid Genicam data"));
return;
}
+
+ /* Get gain infos */
+
+ {
+ struct v4l2_queryctrl queryctrl = {0};
+
+ queryctrl.id = V4L2_CID_GAIN;
+ if (ioctl (priv->device_fd, VIDIOC_QUERYCTRL, &queryctrl) != -1) {
+ priv->gain_available = TRUE;
+ priv->gain_min = queryctrl.minimum;
+ priv->gain_max = queryctrl.maximum;
+ } else {
+ priv->gain_available = FALSE;
+ }
+ }
+
+ /* Get Exposure infos */
+
+ {
+ struct v4l2_query_ext_ctrl query = {0};
+
+ query.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+ if (ioctl (priv->device_fd, VIDIOC_QUERY_EXT_CTRL, &query) != -1) {
+ priv->exposure_available = TRUE;
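+ /* V4L2_CID_EXPOSURE_ABSOLUTE is expressed in 100 µs units; scale the bounds
+ * to microseconds here, matching the FROM/100 and TO*100 conversion used by
+ * the ExposureTimeAbs converter in arv-v4l2.xml. */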
+ priv->exposure_min = query.minimum * 100;
+ priv->exposure_max = query.maximum * 100;
+ } else {
+ priv->exposure_available = FALSE;
+ }
+
+ query.id = V4L2_CID_EXPOSURE_AUTO;
+ if (ioctl (priv->device_fd, VIDIOC_QUERY_EXT_CTRL, &query) != -1) {
+ priv->exposure_auto_index = -1;
+ priv->exposure_manual_index = -1;
+
+ for (i = query.minimum; i <= query.maximum; i++) {
+ struct v4l2_querymenu querymenu = {0};
+
+ querymenu.id = V4L2_CID_EXPOSURE_AUTO;
+ querymenu.index = i;
+
+ if (ioctl (priv->device_fd, VIDIOC_QUERYMENU, &querymenu) != -1) {
+ if (i == V4L2_EXPOSURE_MANUAL)
+ priv->exposure_manual_index = i;
+ else
+ priv->exposure_auto_index = i;
+ }
+ }
+ }
+ }
+
+ /* Enumerate pixel formats */
+
+ priv->pixel_formats = g_array_new (FALSE, TRUE, sizeof (guint32));
+ priv->frame_sizes = g_array_new(FALSE, TRUE, sizeof (struct v4l2_frmsizeenum));
+
+ format_feature = g_string_new ("\n"
+ " Pixel format\n");
+
+ for (i = 0; TRUE; i++) {
+ int j;
+ struct v4l2_fmtdesc format = {0};
+ guint32 genicam_pixel_format = 0;
+
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.index = i;
+ if (arv_v4l2_ioctl (priv->device_fd, VIDIOC_ENUM_FMT, &format) == -1)
+ break;
+
+ genicam_pixel_format = arv_pixel_format_from_v4l2(format.pixelformat);
+
+ g_array_insert_val (priv->pixel_formats, i, genicam_pixel_format);
+
+ if (genicam_pixel_format == 0) {
+ arv_info_device ("Format %s ignored", format.description);
+ continue;
+ } else {
+ arv_info_device ("Format %s found", format.description);
+ }
+
+ g_string_append_printf (format_feature,
+ " \n"
+ " %d\n"
+ " \n",
+ format.description,
+ genicam_pixel_format);
+
+ priv->pixel_format_idx = i;
+
+ for (j = 0; TRUE; j++) {
+ struct v4l2_frmsizeenum frame_size = {0};
+
+ frame_size.index = j;
+ frame_size.pixel_format = format.pixelformat;
+
+ if (arv_v4l2_ioctl(priv->device_fd, VIDIOC_ENUM_FRAMESIZES, &frame_size) == -1)
+ break;
+
+ if (frame_size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ arv_debug_device (" %d×%d", frame_size.discrete.width, frame_size.discrete.height);
+ } else {
+ arv_debug_device (" (%d to %d)×(%d to %d) ",
+ frame_size.stepwise.min_width,
+ frame_size.stepwise.max_width,
+ frame_size.stepwise.min_height,
+ frame_size.stepwise.max_height);
+ }
+
+ if (j == 0)
+ g_array_insert_val (priv->frame_sizes, i, frame_size);
+ }
+ }
+
+ g_array_set_size (priv->pixel_formats, i);
+ g_array_set_size (priv->frame_sizes, i);
+
+ g_string_append_printf (format_feature,
+ " PixelFormatRegister\n"
+ "");
+
+ feature = g_strdup_printf ("\n"
+ " Full height of image sensor.\n"
+ " %u\n"
+ " RO\n"
+ "", priv->sensor_height);
+ arv_gc_set_default_node_data (priv->genicam, "SensorHeight", feature, NULL);
+ g_free (feature);
+
+ feature = g_strdup_printf ("\n"
+ " Full width of image sensor.\n"
+ " %u\n"
+ " RO\n"
+ "", priv->sensor_width);
+ arv_gc_set_default_node_data (priv->genicam, "SensorWidth", feature, NULL);
+ g_free (feature);
+
+ feature = g_strdup_printf ("\n"
+ " %d\n"
+ " RO\n"
+ "", priv->gain_available ? 1 : 0);
+ arv_gc_set_default_node_data (priv->genicam, "GainAvailable", feature, NULL);
+ g_free (feature);
+
+ feature = g_strdup_printf ("\n"
+ " %d\n"
+ " RO\n"
+ "", priv->exposure_available ? 1 : 0);
+ arv_gc_set_default_node_data (priv->genicam, "ExposureAvailable", feature, NULL);
+ g_free (feature);
+
+ arv_gc_set_default_node_data (priv->genicam, "PixelFormat", format_feature->str, NULL);
+ g_string_free (format_feature, TRUE);
}
static void
-arv_v4l2_device_set_property (GObject *self, guint prop_id, const GValue *value, GParamSpec *pspec)
+arv_v4l2_device_finalize (GObject *object)
{
- ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (self));
+ ArvV4l2DevicePrivate *priv = arv_v4l2_device_get_instance_private (ARV_V4L2_DEVICE (object));
- switch (prop_id)
- {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
- break;
- case PROP_V4L2_DEVICE_DEVICE_FILE:
- g_free (priv->device_file);
- priv->device_file = g_value_dup_string (value);
- break;
- }
+ if (priv->device_fd != -1)
+ close (priv->device_fd);
+
+ g_clear_object (&priv->genicam);
+ g_clear_pointer (&priv->genicam_xml, g_free);
+ g_clear_pointer (&priv->device_file, g_free);
+ g_clear_pointer (&priv->device_version, g_free);
+ g_clear_pointer (&priv->device_driver, g_free);
+ g_clear_pointer (&priv->device_card, g_free);
+
+ g_array_unref (priv->frame_sizes);
+ g_array_unref (priv->pixel_formats);
+
+ G_OBJECT_CLASS (arv_v4l2_device_parent_class)->finalize (object);
}
static void
diff --git a/src/arvv4l2deviceprivate.h b/src/arvv4l2deviceprivate.h
index 482b6b16c..7ac2c1038 100644
--- a/src/arvv4l2deviceprivate.h
+++ b/src/arvv4l2deviceprivate.h
@@ -31,6 +31,12 @@
G_BEGIN_DECLS
+int arv_v4l2_device_get_fd (ArvV4l2Device *v4l2_device);
+gboolean arv_v4l2_device_set_image_format (ArvV4l2Device *device);
+gboolean arv_v4l2_device_get_image_format (ArvV4l2Device *device,
+ guint32 *payload_size, ArvPixelFormat *pixel_format,
+ guint32 *width, guint32 *height, guint32 *bytes_per_line);
+
G_END_DECLS
#endif
diff --git a/src/arvv4l2interface.c b/src/arvv4l2interface.c
index ee823b396..539ff6256 100644
--- a/src/arvv4l2interface.c
+++ b/src/arvv4l2interface.c
@@ -27,15 +27,17 @@
#include
#include
+#include
#include
#include
-#include
+#include
+#include
+#include
#include
-#include
+#include
+#include
#include
-#include
-#include
-#include
+#include
struct _ArvV4l2Interface {
ArvInterface interface;
@@ -52,47 +54,102 @@ G_DEFINE_TYPE (ArvV4l2Interface, arv_v4l2_interface, ARV_TYPE_INTERFACE)
typedef struct {
char *id;
+ char *model;
+ char *driver;
char *bus;
char *device_file;
char *version;
+ char *serial_nbr;
volatile gint ref_count;
} ArvV4l2InterfaceDeviceInfos;
static ArvV4l2InterfaceDeviceInfos *
-arv_v4l2_interface_device_infos_new (const char *device_file)
+arv_v4l2_interface_device_infos_new (const char *device_file, const char *name)
{
- ArvV4l2InterfaceDeviceInfos *infos;
+ ArvV4l2InterfaceDeviceInfos *infos = NULL;
g_return_val_if_fail (device_file != NULL, NULL);
if (strncmp ("/dev/vbi", device_file, 8) != 0) {
int fd;
+ struct stat st;
+
+ if (stat(device_file, &st) == -1)
+ return NULL;
- fd = v4l2_open (device_file, O_RDWR);
- if (fd != -1) {
- struct v4l2_capability cap;
+ if (!S_ISCHR(st.st_mode))
+ return NULL;
- if (v4l2_ioctl (fd, VIDIOC_QUERYCAP, &cap) != -1 &&
+ fd = open (device_file, O_RDWR, 0);
+ if (fd != -1) {
+ struct v4l2_capability cap;
+
+ if (ioctl (fd, VIDIOC_QUERYCAP, &cap) != -1 &&
((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) != 0) &&
- ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) == 0) &&
- ((cap.capabilities & V4L2_CAP_READWRITE) != 0)) {
- infos = g_new0 (ArvV4l2InterfaceDeviceInfos, 1);
-
- infos->ref_count = 1;
- infos->id = g_strdup ((char *) cap.card);
- infos->bus = g_strdup ((char *) cap.bus_info);
- infos->device_file = g_strdup (device_file);
- infos->version = g_strdup_printf ("%d.%d.%d",
- (cap.version >> 16) & 0xff,
- (cap.version >> 8) & 0xff,
- (cap.version >> 0) & 0xff);
-
- return infos;
- }
- v4l2_close (fd);
- }
- }
+ ((cap.capabilities & V4L2_CAP_STREAMING) != 0)) {
+ unsigned int i;
+ gboolean found = FALSE;
+ struct media_device_info mdinfo = {0};
+
+ for (i = 0; TRUE; i++) {
+ struct v4l2_fmtdesc format = {0};
+
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.index = i;
+ if (ioctl(fd, VIDIOC_ENUM_FMT, &format) == -1)
+ break;
+
+ if (arv_pixel_format_from_v4l2(format.pixelformat) != 0) {
+ found = TRUE;
+ break;
+ }
+ }
+
+ if (found) {
+ int media_fd = arv_v4l2_get_media_fd(fd, (char *) cap.bus_info);
+
+ infos = g_new0 (ArvV4l2InterfaceDeviceInfos, 1);
+
+ infos->ref_count = 1;
+ infos->bus = g_strdup ((char *) cap.bus_info);
+ infos->driver = g_strdup ((char *) cap.driver);
+ infos->device_file = g_strdup (device_file);
+ infos->model = g_strdup ((char *) cap.card);
+ infos->version = g_strdup_printf ("%d.%d.%d",
+ (cap.version >> 16) & 0xff,
+ (cap.version >> 8) & 0xff,
+ (cap.version >> 0) & 0xff);
+
+ if (media_fd != -1 &&
+ ioctl (media_fd, MEDIA_IOC_DEVICE_INFO, &mdinfo) != -1) {
+ infos->id = g_strdup_printf ("%s-%s-%s",
+ (char *) cap.driver,
+ (char *) cap.card,
+ mdinfo.serial);
+ infos->serial_nbr = g_strdup (mdinfo.serial);
+ } else {
+ infos->id = g_strdup_printf ("%s-%s-%s",
+ (char *) cap.driver,
+ (char *) cap.card,
+ name);
+ infos->serial_nbr = g_strdup (device_file);
+ }
+
+ if (media_fd != -1)
+ close (media_fd);
+
+ close (fd);
+
+ return infos;
+ }
+
+ arv_info_interface ("No suitable pixel format found for v4l2 device '%s'",
+ device_file);
+ }
+ close (fd);
+ }
+ }
return NULL;
}
@@ -116,9 +173,12 @@ arv_v4l2_interface_device_infos_unref (ArvV4l2InterfaceDeviceInfos *infos)
if (g_atomic_int_dec_and_test (&infos->ref_count)) {
g_free (infos->id);
+ g_free (infos->model);
+ g_free (infos->driver);
g_free (infos->bus);
g_free (infos->device_file);
g_free (infos->version);
+ g_free (infos->serial_nbr);
g_free (infos);
}
}
@@ -135,7 +195,8 @@ _discover (ArvV4l2Interface *v4l2_interface, GArray *device_ids)
for (elem = g_list_first (devices); elem; elem = g_list_next (elem)) {
ArvV4l2InterfaceDeviceInfos *device_infos;
- device_infos = arv_v4l2_interface_device_infos_new (g_udev_device_get_device_file (elem->data));
+ device_infos = arv_v4l2_interface_device_infos_new (g_udev_device_get_device_file (elem->data),
+ g_udev_device_get_name(elem->data));
if (device_infos != NULL) {
ArvInterfaceDeviceIds *ids;
@@ -155,9 +216,9 @@ _discover (ArvV4l2Interface *v4l2_interface, GArray *device_ids)
ids->device = g_strdup (device_infos->id);
ids->physical = g_strdup (device_infos->bus);
ids->address = g_strdup (device_infos->device_file);
- ids->vendor = g_strdup ("Aravis");
- ids->model = g_strdup (device_infos->id);
- ids->serial_nbr = g_strdup ("1");
+ ids->vendor = g_strdup (device_infos->driver);
+ ids->model = g_strdup (device_infos->model);
+ ids->serial_nbr = g_strdup (device_infos->serial_nbr);
ids->protocol = "V4L2";
g_array_append_val (device_ids, ids);
diff --git a/src/arvv4l2misc.c b/src/arvv4l2misc.c
new file mode 100644
index 000000000..0c1f7127c
--- /dev/null
+++ b/src/arvv4l2misc.c
@@ -0,0 +1,210 @@
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+typedef struct {
+ guint32 v4l2;
+ guint32 genicam;
+} ArvV4l2GenicamPixelFormat;
+
+static ArvV4l2GenicamPixelFormat pixel_format_map[] = {
+ {V4L2_PIX_FMT_YUYV, ARV_PIXEL_FORMAT_YUV_422_YUYV_PACKED},
+/* Disable these formats for now, as they make GStreamer crash:
+ {V4L2_PIX_FMT_RGB24, ARV_PIXEL_FORMAT_RGB_8_PACKED},
+ {V4L2_PIX_FMT_BGR24, ARV_PIXEL_FORMAT_BGR_8_PACKED},
+*/
+};
+
+ArvPixelFormat
+arv_pixel_format_from_v4l2 (guint32 v4l2_pixel_format)
+{
+ unsigned int i;
+
+ for (i = 0; i < G_N_ELEMENTS(pixel_format_map); i++) {
+ if (v4l2_pixel_format == pixel_format_map[i].v4l2)
+ return pixel_format_map[i].genicam;
+ }
+
+ return 0;
+}
+
+guint32
+arv_pixel_format_to_v4l2 (ArvPixelFormat pixel_format)
+{
+ unsigned int i;
+
+ for (i = 0; i < G_N_ELEMENTS (pixel_format_map); i++) {
+ if (pixel_format_map[i].genicam == pixel_format) {
+ return pixel_format_map[i].v4l2;
+ }
+ }
+
+ return 0;
+}
+
+int
+arv_v4l2_ioctl (int fd, int request, void *arg)
+{
+ int result;
+
+ g_return_val_if_fail (fd != -1, -1);
+
+ do {
+ result = ioctl (fd, request, arg);
+ } while (-1 == result && EINTR == errno);
+
+ return result;
+}
+
+int
+arv_v4l2_get_media_fd (int fd, const char *bus_info)
+{
+ DIR *dp;
+ struct dirent *ep;
+ int media_fd = -1;
+ struct stat sb = {0};
+ dev_t dev;
+ char *media_path;
+
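+ /* Look up the media controller node (/dev/mediaN) sharing the video device's
+ * sysfs parent, optionally matching bus_info, and return an open fd on it. */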
+ if (fstat(fd, &sb) == -1)
+ return -1;
+
+ dev = sb.st_rdev;
+
+ media_path = g_strdup_printf ("/sys/dev/char/%d:%d/device", major(dev), minor(dev));
+ if (media_path == NULL)
+ return -1;
+
+ dp = opendir(media_path);
+ g_clear_pointer (&media_path, g_free);
+
+ if (dp == NULL)
+ return -1;
+
+ while ((ep = readdir(dp))) {
+ if (!memcmp(ep->d_name, "media", 5) && isdigit(ep->d_name[5])) {
+ struct media_device_info mdinfo;
+ char *devname;
+
+ devname = g_strdup_printf ("/dev/%s", ep->d_name);
+ media_fd = open(devname, O_RDWR);
+ g_free (devname);
+
+ if (bus_info &&
+ (ioctl(media_fd, MEDIA_IOC_DEVICE_INFO, &mdinfo) ||
+ strcmp(mdinfo.bus_info, bus_info))) {
+ close(media_fd);
+ continue;
+ }
+ break;
+ }
+ }
+ closedir(dp);
+
+ return media_fd;
+}
+
+gboolean
+arv_v4l2_set_ctrl (int fd, int ctrl_id, gint32 value)
+{
+ struct v4l2_control control = {0};
+
+ control.id = ctrl_id;
+ control.value = value;
+
+ return ioctl (fd, VIDIOC_S_CTRL, &control) == 0;
+}
+
+gint32
+arv_v4l2_get_ctrl (int fd, int ctrl_id)
+{
+ struct v4l2_control control = {0};
+
+ control.id = ctrl_id;
+
+ if (ioctl (fd, VIDIOC_G_CTRL, &control) == 0)
+ return control.value;
+
+ return 0;
+}
+
+gint64
+arv_v4l2_get_int64_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id)
+{
+ struct v4l2_ext_controls ext_controls = {0};
+ struct v4l2_ext_control ext_control = {0};
+
+ ext_controls.ctrl_class = ext_ctrl_class;
+ ext_controls.which = V4L2_CTRL_WHICH_CUR_VAL;
+ ext_controls.count = 1;
+ ext_controls.controls = &ext_control;
+ ext_control.id = ext_ctrl_id;
+
+ if (ioctl (fd, VIDIOC_G_EXT_CTRLS, &ext_controls) == -1)
+ return 0;
+
+ return ext_control.value64;
+}
+
+gboolean
+arv_v4l2_set_int64_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id, gint64 value)
+{
+ struct v4l2_ext_controls ext_controls = {0};
+ struct v4l2_ext_control ext_control = {0};
+
+ ext_controls.ctrl_class = ext_ctrl_class;
+ ext_controls.which = V4L2_CTRL_WHICH_CUR_VAL;
+ ext_controls.count = 1;
+ ext_controls.controls = &ext_control;
+ ext_control.id = ext_ctrl_id;
+ ext_control.value64 = value;
+
+ if (ioctl (fd, VIDIOC_S_EXT_CTRLS, &ext_controls) == -1)
+ return FALSE;
+
+ return TRUE;
+}
+
+gint32
+arv_v4l2_get_int32_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id)
+{
+ struct v4l2_ext_controls ext_controls = {0};
+ struct v4l2_ext_control ext_control = {0};
+
+ ext_controls.ctrl_class = ext_ctrl_class;
+ ext_controls.which = V4L2_CTRL_WHICH_CUR_VAL;
+ ext_controls.count = 1;
+ ext_controls.controls = &ext_control;
+ ext_control.id = ext_ctrl_id;
+
+ if (ioctl (fd, VIDIOC_G_EXT_CTRLS, &ext_controls) == -1)
+ return 0;
+
+ return ext_control.value;
+}
+
+gboolean
+arv_v4l2_set_int32_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id, gint32 value)
+{
+ struct v4l2_ext_controls ext_controls = {0};
+ struct v4l2_ext_control ext_control = {0};
+
+ ext_controls.ctrl_class = ext_ctrl_class;
+ ext_controls.which = V4L2_CTRL_WHICH_CUR_VAL;
+ ext_controls.count = 1;
+ ext_controls.controls = &ext_control;
+ ext_control.id = ext_ctrl_id;
+ ext_control.value = value;
+
+ if (ioctl (fd, VIDIOC_S_EXT_CTRLS, &ext_controls) == -1)
+ return FALSE;
+
+ return TRUE;
+}
diff --git a/src/arvv4l2miscprivate.h b/src/arvv4l2miscprivate.h
new file mode 100644
index 000000000..0c65bbffb
--- /dev/null
+++ b/src/arvv4l2miscprivate.h
@@ -0,0 +1,51 @@
+/* Aravis - Digital camera library
+ *
+ * Copyright © 2009-2021 Emmanuel Pacaud
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General
+ * Public License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ * Author: Emmanuel Pacaud
+ */
+
+#ifndef ARV_V4L2_MISC_PRIVATE_H
+#define ARV_V4L2_MISC_PRIVATE_H
+
+#if !defined (ARV_H_INSIDE) && !defined (ARAVIS_COMPILATION)
+#error "Only can be included directly."
+#endif
+
+#include
+
+G_BEGIN_DECLS
+
+ArvPixelFormat arv_pixel_format_from_v4l2 (guint32 v4l2_pixel_format);
+guint32 arv_pixel_format_to_v4l2 (ArvPixelFormat pixel_format);
+
+int arv_v4l2_ioctl (int fd, int request, void *arg);
+int arv_v4l2_get_media_fd (int fd, const char *bus_info);
+
+gboolean arv_v4l2_set_ctrl (int fd, int ctrl_id, gint32 value);
+gint32 arv_v4l2_get_ctrl (int fd, int ctrl_id);
+
+gint64 arv_v4l2_get_int64_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id);
+gboolean arv_v4l2_set_int64_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id, gint64 value);
+
+gint32 arv_v4l2_get_int32_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id);
+gboolean arv_v4l2_set_int32_ext_ctrl (int fd, int ext_ctrl_class, int ext_ctrl_id, gint32 value);
+
+G_END_DECLS
+
+#endif
diff --git a/src/arvv4l2stream.c b/src/arvv4l2stream.c
index f8a88a755..d9dade1a8 100644
--- a/src/arvv4l2stream.c
+++ b/src/arvv4l2stream.c
@@ -26,26 +26,64 @@
*/
#include
-#include
+#include
+#include
#include
#include
#include
#include
+#include
+#include
+#include
+#include
+#include
+
+#define ARV_V4L2_STREAM_N_BUFFERS 3
+
+typedef enum {
+ ARV_V4L2_STREAM_IO_METHOD_UNKNOWN = -1,
+ ARV_V4L2_STREAM_IO_METHOD_READ,
+ ARV_V4L2_STREAM_IO_METHOD_MMAP,
+ ARV_V4L2_STREAM_IO_METHOD_USER_POINTER
+} ArvV4l2StreamIOMethod;
+
+typedef struct {
+ ArvV4l2Device *v4l2_device;
+ void *data;
+ size_t size;
+ int index;
+} ArvV4l2StreamBufferData;
typedef struct {
ArvStream *stream;
+ gboolean thread_started;
+ GMutex thread_started_mutex;
+ GCond thread_started_cond;
+
ArvV4l2Device *v4l2_device;
ArvStreamCallback callback;
void *callback_data;
gboolean cancel;
+ int v4l2_fd;
+
+ ArvV4l2StreamIOMethod io_method;
+
+ ArvPixelFormat pixel_format;
+ guint32 image_width;
+ guint32 image_height;
+ guint32 image_x_padding;
+
+ guint32 frame_id;
+
/* Statistics */
guint n_completed_buffers;
guint n_failures;
guint n_underruns;
+ guint n_transferred_bytes;
} ArvV4l2StreamThreadData;
typedef struct {
@@ -66,37 +104,150 @@ G_DEFINE_TYPE_WITH_CODE (ArvV4l2Stream, arv_v4l2_stream, ARV_TYPE_STREAM, G_ADD_
/* Acquisition thread */
+static void
+_queue_buffers (ArvV4l2StreamThreadData *thread_data, GHashTable *buffers)
+{
+ ArvBuffer *arv_buffer;
+ struct v4l2_buffer bufd = {0};
+
+ do {
+ arv_buffer = arv_stream_pop_input_buffer (thread_data->stream);
+ if (ARV_IS_BUFFER (arv_buffer)) {
+ memset (&bufd, 0, sizeof bufd);
+ bufd.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
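+ /* The driver-side buffer index is assumed to be attached to the ArvBuffer as
+ * "v4l2-index" GObject data when the native buffers are registered (outside
+ * this hunk). */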
+ bufd.index = GPOINTER_TO_INT (g_object_get_data (G_OBJECT (arv_buffer), "v4l2-index"));
+ if (thread_data->io_method == ARV_V4L2_STREAM_IO_METHOD_MMAP) {
+ bufd.memory = V4L2_MEMORY_MMAP;
+ } else {
+ bufd.memory = V4L2_MEMORY_USERPTR;
+ bufd.m.userptr = (unsigned long) arv_buffer->priv->data;
+ bufd.length = arv_buffer->priv->allocated_size;
+ }
+
+ if (arv_v4l2_ioctl (thread_data->v4l2_fd, VIDIOC_QBUF, &bufd) == -1) {
+ arv_warning_stream_thread ("Failed to queue v4l2 buffer (%s)",
+ strerror (errno));
+ arv_stream_push_output_buffer(thread_data->stream, arv_buffer);
+ } else {
+ arv_trace_stream_thread ("Queue buffer %d\n", bufd.index);
+ g_hash_table_replace (buffers, GINT_TO_POINTER (bufd.index), arv_buffer);
+ }
+ }
+ } while (arv_buffer != NULL);
+}
+
static void *
arv_v4l2_stream_thread (void *data)
{
ArvV4l2StreamThreadData *thread_data = data;
- ArvBuffer *buffer;
+ GHashTable *buffers;
+ GHashTableIter iter;
+ gpointer key, value;
+ ArvBuffer *arv_buffer;
arv_info_stream_thread ("[V4l2Stream::thread] Start");
if (thread_data->callback != NULL)
thread_data->callback (thread_data->callback_data, ARV_STREAM_CALLBACK_TYPE_INIT, NULL);
+ buffers = g_hash_table_new (g_direct_hash, g_direct_equal);
+ _queue_buffers(thread_data, buffers);
+
+ g_mutex_lock (&thread_data->thread_started_mutex);
+ thread_data->thread_started = TRUE;
+ g_cond_signal (&thread_data->thread_started_cond);
+ g_mutex_unlock (&thread_data->thread_started_mutex);
+
while (!g_atomic_int_get (&thread_data->cancel)) {
- sleep(1);
- buffer = arv_stream_pop_input_buffer (thread_data->stream);
- if (buffer != NULL) {
- if (thread_data->callback != NULL)
- thread_data->callback (thread_data->callback_data, ARV_STREAM_CALLBACK_TYPE_START_BUFFER,
- NULL);
-
- if (buffer->priv->status == ARV_BUFFER_STATUS_SUCCESS)
- thread_data->n_completed_buffers++;
- else
- thread_data->n_failures++;
- arv_stream_push_output_buffer (thread_data->stream, buffer);
-
- if (thread_data->callback != NULL)
- thread_data->callback (thread_data->callback_data, ARV_STREAM_CALLBACK_TYPE_BUFFER_DONE,
- buffer);
- } else
- thread_data->n_underruns++;
- }
+ struct v4l2_buffer bufd = {0};
+ fd_set fds;
+ struct timeval tv;
+ int result;
+
+ _queue_buffers(thread_data, buffers);
+
+ FD_ZERO(&fds);
+ FD_SET(thread_data->v4l2_fd, &fds);
+
+ tv.tv_sec = 1;
+ tv.tv_usec = 0;
+ result = select(thread_data->v4l2_fd + 1, &fds, NULL, NULL, &tv);
+ if(result == -1){
+ if (errno != EINTR)
+ arv_warning_stream_thread ("Error while waiting for frame (%s)", strerror(errno));
+ continue;
+ }
+
+ if (result == 0)
+ continue;
+
+ memset (&bufd, 0, sizeof bufd);
+ bufd.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ bufd.memory = thread_data->io_method == ARV_V4L2_STREAM_IO_METHOD_MMAP ?
+ V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR;
+ bufd.index = 0;
+
+ if(arv_v4l2_ioctl(thread_data->v4l2_fd, VIDIOC_DQBUF, &bufd) == -1) {
+ arv_warning_stream_thread("DeQueue buffer error (%s)", strerror(errno));
+ switch (errno) {
+ case EAGAIN:
+ continue;
+ }
+ } else
+ arv_trace_stream_thread ("Dequeued buffer %d\n", bufd.index);
+
+ arv_buffer = g_hash_table_lookup (buffers, GINT_TO_POINTER (bufd.index));
+ if (ARV_IS_BUFFER (arv_buffer)) {
+ if (thread_data->callback != NULL)
+ thread_data->callback (thread_data->callback_data,
+ ARV_STREAM_CALLBACK_TYPE_START_BUFFER,
+ arv_buffer);
+
+ g_hash_table_remove (buffers, GINT_TO_POINTER(bufd.index));
+ arv_buffer->priv->payload_type = ARV_BUFFER_PAYLOAD_TYPE_IMAGE;
+ arv_buffer->priv->chunk_endianness = G_BIG_ENDIAN;
+ arv_buffer->priv->status = ARV_BUFFER_STATUS_SUCCESS;
+ /* TODO: sometimes bufd.timestamp is set to 0, and bufd.flags is also 0. When the timestamp is
+ * correct, flags are set to 0x00012000
+ * (V4L2_BUF_FLAG_TSTAMP_SRC_SOE | V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) */
+ arv_buffer->priv->timestamp_ns = 1000000000L * bufd.timestamp.tv_sec +
+ 1000L * bufd.timestamp.tv_usec;
+ arv_buffer->priv->system_timestamp_ns = g_get_real_time () * 1000;
+ arv_buffer->priv->frame_id = thread_data->frame_id++;
+ arv_buffer->priv->received_size = bufd.bytesused;
+
+ arv_buffer_set_n_parts(arv_buffer, 1);
+ arv_buffer->priv->parts[0].data_offset = 0;
+ arv_buffer->priv->parts[0].component_id = 0;
+ arv_buffer->priv->parts[0].data_type = ARV_BUFFER_PART_DATA_TYPE_2D_IMAGE;
+ arv_buffer->priv->parts[0].pixel_format = thread_data->pixel_format;
+ arv_buffer->priv->parts[0].width = thread_data->image_width;
+ arv_buffer->priv->parts[0].height = thread_data->image_height;
+ arv_buffer->priv->parts[0].x_offset = 0;
+ arv_buffer->priv->parts[0].y_offset = 0;
+ arv_buffer->priv->parts[0].x_padding = thread_data->image_x_padding;
+ arv_buffer->priv->parts[0].y_padding = 0;
+ arv_buffer->priv->parts[0].size = arv_buffer->priv->received_size;
+
+ arv_trace_stream_thread("size = %zu", arv_buffer->priv->received_size);
+
+ thread_data->n_completed_buffers++;
+ thread_data->n_transferred_bytes += bufd.bytesused;
+ arv_stream_push_output_buffer (thread_data->stream, arv_buffer);
+ if (thread_data->callback != NULL)
+ thread_data->callback (thread_data->callback_data,
+ ARV_STREAM_CALLBACK_TYPE_BUFFER_DONE,
+ arv_buffer);
+ } else
+ arv_warning_stream_thread ("Buffer for index %d not found", bufd.index);
+ }
+
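+ /* On exit, hand any buffers still held by the thread back to the output queue */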
+ g_hash_table_iter_init (&iter, buffers);
+ while (g_hash_table_iter_next (&iter, &key, &value))
+ {
+ arv_stream_push_output_buffer (thread_data->stream, value);
+ }
+ g_hash_table_unref (buffers);
if (thread_data->callback != NULL)
thread_data->callback (thread_data->callback_data, ARV_STREAM_CALLBACK_TYPE_EXIT, NULL);
@@ -106,7 +257,7 @@ arv_v4l2_stream_thread (void *data)
return NULL;
}
-/* ArvV4l2Stream implemenation */
+/* ArvV4l2Stream implementation */
static gboolean
arv_v4l2_stream_start_acquisition (ArvStream *stream, GError **error)
@@ -114,15 +265,116 @@ arv_v4l2_stream_start_acquisition (ArvStream *stream, GError **error)
ArvV4l2Stream *v4l2_stream = ARV_V4L2_STREAM (stream);
ArvV4l2StreamPrivate *priv = arv_v4l2_stream_get_instance_private (v4l2_stream);
ArvV4l2StreamThreadData *thread_data;
+ ArvBuffer *buffer;
+ gboolean mixed_io_method = FALSE;
+ guint32 bit_per_pixel;
+ guint32 bytes_per_line;
+ guint32 payload_size;
+ guint32 width_bytes;
+ guint32 index = 0;
+ guint32 n_buffers = 0;
g_return_val_if_fail (priv->thread == NULL, FALSE);
g_return_val_if_fail (priv->thread_data != NULL, FALSE);
thread_data = priv->thread_data;
thread_data->cancel = FALSE;
+ thread_data->thread_started = FALSE;
+ thread_data->io_method = ARV_V4L2_STREAM_IO_METHOD_UNKNOWN;
+
+ /* Move buffers from input queue to output queue. They will be pushed back to the input queue after the
+ * processing below */
+ do {
+ buffer = arv_stream_pop_input_buffer(stream);
+ if (ARV_IS_BUFFER(buffer))
+ arv_stream_push_output_buffer(stream, buffer);
+ } while (buffer != NULL);
+
+ /* Detect if the IO methods are mixed, and compute the number of buffers */
+ do {
+ buffer = arv_stream_try_pop_buffer (stream);
+ if (ARV_IS_BUFFER(buffer)) {
+ ArvV4l2StreamBufferData *buffer_data;
+
+ buffer_data = g_object_get_data (G_OBJECT(buffer), "v4l2-buffer-data");
+ if (buffer_data != NULL) {
+ if (thread_data->io_method != ARV_V4L2_STREAM_IO_METHOD_UNKNOWN &&
+ thread_data->io_method != ARV_V4L2_STREAM_IO_METHOD_MMAP)
+ mixed_io_method = TRUE;
+ thread_data->io_method = ARV_V4L2_STREAM_IO_METHOD_MMAP;
+ } else {
+ if (thread_data->io_method != ARV_V4L2_STREAM_IO_METHOD_UNKNOWN &&
+ thread_data->io_method != ARV_V4L2_STREAM_IO_METHOD_USER_POINTER)
+ mixed_io_method = TRUE;
+ thread_data->io_method = ARV_V4L2_STREAM_IO_METHOD_USER_POINTER;
+ g_object_set_data (G_OBJECT(buffer), "v4l2-index", GINT_TO_POINTER(index));
+ index++;
+ }
+
+ arv_stream_push_buffer(stream, buffer);
+ }
+ } while (buffer != NULL);
+ n_buffers = index;
+
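+ /* v4l2 streaming IO uses a single memory type per queue, so mixing mmap'ed and user-pointer buffers is rejected */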
+ if (mixed_io_method) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "V4l2 mixed IO method not allowed (mmap and read)");
+ return FALSE;
+ }
+
+ /* The user pointer IO method is used for non-native buffers */
+ if (thread_data->io_method == ARV_V4L2_STREAM_IO_METHOD_USER_POINTER) {
+ struct v4l2_requestbuffers req = {0};
+
+ req.count = n_buffers;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_USERPTR;
+
+ if (arv_v4l2_ioctl(priv->thread_data->v4l2_fd, VIDIOC_REQBUFS, &req) == -1) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "V4l2 user pointer method not supported (%s)",
+ strerror (errno));
+ return FALSE;
+ }
+ }
+
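+ /* Cache the negotiated image geometry; it is copied into the buffer part metadata of each dequeued frame */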
+ if (!arv_v4l2_device_get_image_format (priv->thread_data->v4l2_device,
+ &payload_size,
+ &thread_data->pixel_format,
+ &thread_data->image_width,
+ &thread_data->image_height,
+ &bytes_per_line)) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Failed to query v4l2 image format");
+ return FALSE;
+ }
+
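+ /* Reject formats that can not be described: unknown bit depth, or a payload smaller than a full image */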
+ bit_per_pixel = ARV_PIXEL_FORMAT_BIT_PER_PIXEL (thread_data->pixel_format);
+ if (bit_per_pixel < 1 ||
+ thread_data->image_height * bytes_per_line > payload_size) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Invalid v4l2 pixel format");
+ return FALSE;
+ }
+
+ width_bytes = (thread_data->image_width * bit_per_pixel + 7) / 8;
+ if (bytes_per_line < width_bytes) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Invalid v4l2 line length");
+ return FALSE;
+ }
+
+ thread_data->image_x_padding = bytes_per_line - width_bytes;
+ thread_data->frame_id = 0;
priv->thread = g_thread_new ("arv_v4l2_stream", arv_v4l2_stream_thread, priv->thread_data);
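+ /* Wait until the streaming thread has queued its buffers and signalled that it is running */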
+ g_mutex_lock (&thread_data->thread_started_mutex);
+ while (!thread_data->thread_started)
+ g_cond_wait (&thread_data->thread_started_cond,
+ &thread_data->thread_started_mutex);
+ g_mutex_unlock (&thread_data->thread_started_mutex);
+
return TRUE;
}
@@ -146,6 +398,103 @@ arv_v4l2_stream_stop_acquisition (ArvStream *stream, GError **error)
return TRUE;
}
+static void
+_buffer_data_destroy_func (gpointer data)
+{
+ ArvV4l2StreamBufferData *buffer_data = data;
+
+ arv_debug_stream ("free data %p size %zu\n", buffer_data->data, buffer_data->size);
+ arv_debug_stream ("v4l2 device %p\n", buffer_data->v4l2_device);
+
+ munmap (buffer_data->data, buffer_data->size);
+
+ g_object_unref (buffer_data->v4l2_device);
+
+ g_free (buffer_data);
+}
+
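+/* Implementation of the create_buffers stream method: requests driver-side (mmap) buffers and exposes them as ArvBuffers */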
+static gboolean
+arv_v4l2_stream_create_buffers (ArvStream *stream, guint n_buffers, size_t size,
+ void *user_data, GDestroyNotify user_data_destroy_func,
+ GError **error)
+{
+ ArvV4l2Stream *v4l2_stream = ARV_V4L2_STREAM (stream);
+ ArvV4l2StreamPrivate *priv = arv_v4l2_stream_get_instance_private (v4l2_stream);
+ struct v4l2_requestbuffers req = {0};
+ guint i;
+
+ if (!arv_v4l2_device_set_image_format (priv->thread_data->v4l2_device)) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Failed to set image format (%s)",
+ strerror(errno));
+ return FALSE;
+ }
+
+ req.count = n_buffers;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+ if (arv_v4l2_ioctl(priv->thread_data->v4l2_fd, VIDIOC_REQBUFS, &req) == -1) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Failed to request v4l2 buffer (%s)",
+ strerror(errno));
+ return FALSE;
+ }
+
+ if (req.count < 2) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Failed to request enough v4l2 buffers (only %u allocated)",
+ req.count);
+ return FALSE;
+ }
+
+ if (req.count != n_buffers)
+ arv_warning_stream ("Could only create %d buffers, while %d were requested", req.count, n_buffers);
+
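+ /* Query each driver-allocated buffer, map it into the process, and wrap the mapping in an ArvBuffer */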
+ for (i = 0; i < req.count; i++) {
+ ArvBuffer *buffer;
+ ArvV4l2StreamBufferData *buffer_data;
+ struct v4l2_buffer buf = {0};
+ unsigned char *v4l2_buffer = NULL;
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = i;
+
+ if (arv_v4l2_ioctl(priv->thread_data->v4l2_fd, VIDIOC_QUERYBUF, &buf) == -1) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Failed to request v4l2 buffer (%s)",
+ strerror(errno));
+ return FALSE;
+ }
+
+ v4l2_buffer = mmap (NULL, buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ priv->thread_data->v4l2_fd, buf.m.offset);
+ if (v4l2_buffer == MAP_FAILED) {
+ g_set_error (error, ARV_DEVICE_ERROR, ARV_DEVICE_ERROR_PROTOCOL_ERROR,
+ "Failed to map v4l2 buffer (%s)",
+ strerror(errno));
+ return FALSE;
+ }
+
+ size = buf.length;
+
+ buffer = arv_buffer_new_full (size, v4l2_buffer, user_data, user_data_destroy_func);
+
+ buffer_data = g_new0 (ArvV4l2StreamBufferData, 1);
+ buffer_data->v4l2_device = g_object_ref (priv->thread_data->v4l2_device);
+ buffer_data->data = buffer->priv->data;
+ buffer_data->size = size;
+
+ arv_debug_stream ("buffer %d data %p size %zu\n", i, buffer->priv->data, size);
+
+ g_object_set_data_full (G_OBJECT (buffer), "v4l2-buffer-data",
+ buffer_data, _buffer_data_destroy_func);
+ g_object_set_data (G_OBJECT(buffer), "v4l2-index", GINT_TO_POINTER(i));
+
+ arv_stream_push_buffer (stream, buffer);
+ }
+
+ arv_info_stream ("Created %d v4l2 native buffers", i);
+
+ return TRUE;
+}
+
/**
* arv_v4l2_stream_new: (skip)
* @camera: a #ArvV4l2Device
@@ -154,6 +503,8 @@ arv_v4l2_stream_stop_acquisition (ArvStream *stream, GError **error)
* @error: a #GError placeholder, %NULL to ignore
*
* Return Value: (transfer full): a new #ArvStream.
+ *
+ * Since: 0.10.0
*/
ArvStream *
@@ -178,7 +529,7 @@ arv_v4l2_stream_constructed (GObject *object)
ArvV4l2StreamThreadData *thread_data;
g_autoptr (ArvV4l2Device) v4l2_device = NULL;
- thread_data = g_new (ArvV4l2StreamThreadData, 1);
+ thread_data = g_new0 (ArvV4l2StreamThreadData, 1);
thread_data->stream = stream;
g_object_get (object,
@@ -201,6 +552,10 @@ arv_v4l2_stream_constructed (GObject *object)
G_TYPE_UINT64, &priv->thread_data->n_failures);
arv_stream_declare_info (ARV_STREAM (v4l2_stream), "n_underruns",
G_TYPE_UINT64, &priv->thread_data->n_underruns);
+ arv_stream_declare_info (ARV_STREAM (v4l2_stream), "n_transferred_bytes",
+ G_TYPE_UINT64, &priv->thread_data->n_transferred_bytes);
+
+ thread_data->v4l2_fd = arv_v4l2_device_get_fd (ARV_V4L2_DEVICE(thread_data->v4l2_device));
}
/* ArvStream implementation */
@@ -238,4 +593,5 @@ arv_v4l2_stream_class_init (ArvV4l2StreamClass *v4l2_stream_class)
stream_class->start_acquisition = arv_v4l2_stream_start_acquisition;
stream_class->stop_acquisition = arv_v4l2_stream_stop_acquisition;
+ stream_class->create_buffers = arv_v4l2_stream_create_buffers;
}
diff --git a/src/meson.build b/src/meson.build
index 7a1683f36..98240a6dd 100644
--- a/src/meson.build
+++ b/src/meson.build
@@ -230,23 +230,25 @@ if usb_dep.found()
]
endif
-if v4l2_dep.found()
+if v4l2_enabled
library_sources += [
'arvv4l2interface.c',
'arvv4l2device.c',
- 'arvv4l2stream.c'
+ 'arvv4l2stream.c',
+ 'arvv4l2misc.c',
]
library_no_introspection_sources += [
]
library_headers += [
'arvv4l2interface.h',
'arvv4l2device.h',
- 'arvv4l2stream.h'
+ 'arvv4l2stream.h',
]
library_private_headers += [
'arvv4l2deviceprivate.h',
'arvv4l2interfaceprivate.h',
'arvv4l2streamprivate.h',
+ 'arvv4l2miscprivate.h',
]
endif
@@ -265,7 +267,7 @@ configure_file (input: 'arvapi.h.in', output: 'arvapi.h',
features_library_config_data = configuration_data ()
features_library_config_data.set10 ('ARAVIS_HAS_USB', usb_dep.found())
features_library_config_data.set10 ('ARAVIS_HAS_EVENT', get_option('event'))
-features_library_config_data.set10 ('ARAVIS_HAS_V4L2', v4l2_dep.found())
+features_library_config_data.set10 ('ARAVIS_HAS_V4L2', v4l2_enabled)
features_library_config_data.set10 ('ARAVIS_HAS_PACKET_SOCKET', packet_socket_enabled)
features_library_config_data.set10 ('ARAVIS_HAS_FAST_HEARTBEAT', get_option ('fast-heartbeat'))
configure_file (input: 'arvfeatures.h.in', output: 'arvfeatures.h',
diff --git a/tests/meson.build b/tests/meson.build
index 2c5df5802..89892d608 100644
--- a/tests/meson.build
+++ b/tests/meson.build
@@ -92,9 +92,15 @@ if get_option('tests')
['arv-roi-test', 'arvroitest.c'],
['time-test', 'timetest.c'],
['load-http-test', 'loadhttptest.c'],
- ['cpp-test', 'cpp.cc']
+ ['cpp-test', 'cpp.cc'],
]
+ if get_option ('v4l2').enabled()
+ examples += [
+ ['v4l2-test', 'v4l2test.c'],
+ ]
+ endif
+
if host_machine.system()=='linux'
examples+=[['realtime-test','realtimetest.c']] # uses Linux RT API unavailable on other platforms
endif
diff --git a/tests/v4l2test.c b/tests/v4l2test.c
new file mode 100644
index 000000000..c8cc817d2
--- /dev/null
+++ b/tests/v4l2test.c
@@ -0,0 +1,669 @@
+/*
+ * V4L2 video capture example
+ *
+ * This program can be used and distributed without restrictions.
+ *
+ * This program is provided with the V4L2 API
+ * see http://linuxtv.org/docs.php for more information
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+
+#include <getopt.h>             /* getopt_long() */
+
+#include <fcntl.h>              /* low-level i/o */
+#include <unistd.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sys/time.h>
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+
+#include <linux/videodev2.h>
+
+#define CLEAR(x) memset(&(x), 0, sizeof(x))
+
+#ifndef V4L2_PIX_FMT_H264
+#define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') /* H264 with start codes */
+#endif
+
+enum io_method {
+ IO_METHOD_READ,
+ IO_METHOD_MMAP,
+ IO_METHOD_USERPTR,
+};
+
+struct buffer {
+ void *start;
+ size_t length;
+};
+
+static char *dev_name;
+static enum io_method io = IO_METHOD_MMAP;
+static int fd = -1;
+struct buffer *buffers;
+static unsigned int n_buffers;
+static int out_buf;
+static int force_format;
+static int frame_count = 200;
+static int frame_number = 0;
+
+static void errno_exit(const char *s)
+{
+ fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
+ exit(EXIT_FAILURE);
+}
+
+static int xioctl(int fh, int request, void *arg)
+{
+ int r;
+
+ do {
+ r = ioctl(fh, request, arg);
+ } while (-1 == r && EINTR == errno);
+
+ return r;
+}
+
+static void process_image(const void *p, int size)
+{
+ char filename[32];
+ FILE *fp;
+
+ frame_number++;
+ snprintf(filename, sizeof filename, "frame-%d.raw", frame_number);
+ fp = fopen(filename, "wb");
+ if (!fp)
+ errno_exit("fopen");
+
+ if (out_buf)
+ fwrite(p, size, 1, fp);
+
+ fflush(fp);
+ fclose(fp);
+}
+
+static int read_frame(void)
+{
+ struct v4l2_buffer buf;
+ unsigned int i;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ if (-1 == read(fd, buffers[0].start, buffers[0].length)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("read");
+ }
+ }
+
+ process_image(buffers[0].start, buffers[0].length);
+ break;
+
+ case IO_METHOD_MMAP:
+ CLEAR(buf);
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("VIDIOC_DQBUF");
+ }
+ }
+
+ assert(buf.index < n_buffers);
+
+ process_image(buffers[buf.index].start, buf.bytesused);
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ break;
+
+ case IO_METHOD_USERPTR:
+ CLEAR(buf);
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("VIDIOC_DQBUF");
+ }
+ }
+
+ for (i = 0; i < n_buffers; ++i)
+ if (buf.m.userptr == (unsigned long)buffers[i].start
+ && buf.length == buffers[i].length)
+ break;
+
+ assert(i < n_buffers);
+
+ process_image((void *)buf.m.userptr, buf.bytesused);
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ break;
+ }
+
+ return 1;
+}
+
+static void mainloop(void)
+{
+ unsigned int count;
+
+ count = frame_count;
+
+ while (count-- > 0) {
+ for (;;) {
+ fd_set fds;
+ struct timeval tv;
+ int r;
+
+ FD_ZERO(&fds);
+ FD_SET(fd, &fds);
+
+ /* Timeout. */
+ tv.tv_sec = 2;
+ tv.tv_usec = 0;
+
+ r = select(fd + 1, &fds, NULL, NULL, &tv);
+
+ if (-1 == r) {
+ if (EINTR == errno)
+ continue;
+ errno_exit("select");
+ }
+
+ if (0 == r) {
+ fprintf(stderr, "select timeout\n");
+ exit(EXIT_FAILURE);
+ }
+
+ if (read_frame())
+ break;
+ /* EAGAIN - continue select loop. */
+ }
+ }
+}
+
+static void stop_capturing(void)
+{
+ enum v4l2_buf_type type;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ /* Nothing to do. */
+ break;
+
+ case IO_METHOD_MMAP:
+ case IO_METHOD_USERPTR:
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl(fd, VIDIOC_STREAMOFF, &type))
+ errno_exit("VIDIOC_STREAMOFF");
+ break;
+ }
+}
+
+static void start_capturing(void)
+{
+ unsigned int i;
+ enum v4l2_buf_type type;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ /* Nothing to do. */
+ break;
+
+ case IO_METHOD_MMAP:
+ for (i = 0; i < n_buffers; ++i) {
+ struct v4l2_buffer buf;
+
+ CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = i;
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ }
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
+ errno_exit("VIDIOC_STREAMON");
+ break;
+
+ case IO_METHOD_USERPTR:
+ for (i = 0; i < n_buffers; ++i) {
+ struct v4l2_buffer buf;
+
+ CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+ buf.index = i;
+ buf.m.userptr = (unsigned long)buffers[i].start;
+ buf.length = buffers[i].length;
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ }
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
+ errno_exit("VIDIOC_STREAMON");
+ break;
+ }
+}
+
+static void uninit_device(void)
+{
+ unsigned int i;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ free(buffers[0].start);
+ break;
+
+ case IO_METHOD_MMAP:
+ for (i = 0; i < n_buffers; ++i)
+ if (-1 == munmap(buffers[i].start, buffers[i].length))
+ errno_exit("munmap");
+ break;
+
+ case IO_METHOD_USERPTR:
+ for (i = 0; i < n_buffers; ++i)
+ free(buffers[i].start);
+ break;
+ }
+
+ free(buffers);
+}
+
+static void init_read(unsigned int buffer_size)
+{
+ buffers = calloc(1, sizeof(*buffers));
+
+ if (!buffers) {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+
+ buffers[0].length = buffer_size;
+ buffers[0].start = malloc(buffer_size);
+
+ if (!buffers[0].start) {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+}
+
+static void init_mmap(void)
+{
+ struct v4l2_requestbuffers req;
+
+ CLEAR(req);
+
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) {
+ if (EINVAL == errno) {
+ fprintf(stderr, "%s does not support "
+ "memory mapping\n", dev_name);
+ exit(EXIT_FAILURE);
+ } else {
+ errno_exit("VIDIOC_REQBUFS");
+ }
+ }
+
+ if (req.count < 2) {
+ fprintf(stderr, "Insufficient buffer memory on %s\n",
+ dev_name);
+ exit(EXIT_FAILURE);
+ }
+
+ buffers = calloc(req.count, sizeof(*buffers));
+
+ if (!buffers) {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+
+ for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
+ struct v4l2_buffer buf;
+
+ CLEAR(buf);
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = n_buffers;
+
+ if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf))
+ errno_exit("VIDIOC_QUERYBUF");
+
+ buffers[n_buffers].length = buf.length;
+ buffers[n_buffers].start =
+ mmap(NULL /* start anywhere */,
+ buf.length,
+ PROT_READ | PROT_WRITE /* required */,
+ MAP_SHARED /* recommended */,
+ fd, buf.m.offset);
+
+ if (MAP_FAILED == buffers[n_buffers].start)
+ errno_exit("mmap");
+ }
+}
+
+static void init_userp(unsigned int buffer_size)
+{
+ struct v4l2_requestbuffers req;
+
+ CLEAR(req);
+
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) {
+ if (EINVAL == errno) {
+ fprintf(stderr, "%s does not support "
+ "user pointer i/o\n", dev_name);
+ exit(EXIT_FAILURE);
+ } else {
+ errno_exit("VIDIOC_REQBUFS");
+ }
+ }
+
+ buffers = calloc(4, sizeof(*buffers));
+
+ if (!buffers) {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+
+ for (n_buffers = 0; n_buffers < 4; ++n_buffers) {
+ buffers[n_buffers].length = buffer_size;
+ buffers[n_buffers].start = malloc(buffer_size);
+
+ if (!buffers[n_buffers].start) {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+ }
+}
+
+static void init_device(void)
+{
+ struct v4l2_capability cap;
+ struct v4l2_cropcap cropcap;
+ struct v4l2_crop crop;
+ struct v4l2_format fmt;
+ unsigned int min;
+
+ if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) {
+ if (EINVAL == errno) {
+ fprintf(stderr, "%s is no V4L2 device\n",
+ dev_name);
+ exit(EXIT_FAILURE);
+ } else {
+ errno_exit("VIDIOC_QUERYCAP");
+ }
+ }
+
+ if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+ fprintf(stderr, "%s is no video capture device\n",
+ dev_name);
+ exit(EXIT_FAILURE);
+ }
+
+ switch (io) {
+ case IO_METHOD_READ:
+ if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
+ fprintf(stderr, "%s does not support read i/o\n",
+ dev_name);
+ exit(EXIT_FAILURE);
+ }
+ break;
+
+ case IO_METHOD_MMAP:
+ case IO_METHOD_USERPTR:
+ if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
+ fprintf(stderr, "%s does not support streaming i/o\n",
+ dev_name);
+ exit(EXIT_FAILURE);
+ }
+ break;
+ }
+
+
+ /* Select video input, video standard and tune here. */
+
+
+ CLEAR(cropcap);
+
+ cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) {
+ crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ crop.c = cropcap.defrect; /* reset to default */
+
+ if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) {
+ switch (errno) {
+ case EINVAL:
+ /* Cropping not supported. */
+ break;
+ default:
+ /* Errors ignored. */
+ break;
+ }
+ }
+ } else {
+ /* Errors ignored. */
+ }
+
+
+ CLEAR(fmt);
+
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (force_format) {
+ fprintf(stderr, "Set H264\r\n");
+ fmt.fmt.pix.width = 640; //replace
+ fmt.fmt.pix.height = 480; //replace
+ fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_H264; //replace
+ fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+ if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt))
+ errno_exit("VIDIOC_S_FMT");
+
+ /* Note VIDIOC_S_FMT may change width and height. */
+ } else {
+ /* Preserve original settings as set by v4l2-ctl for example */
+ if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt))
+ errno_exit("VIDIOC_G_FMT");
+ }
+
+ /* Buggy driver paranoia. */
+ min = fmt.fmt.pix.width * 2;
+ if (fmt.fmt.pix.bytesperline < min)
+ fmt.fmt.pix.bytesperline = min;
+ min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
+ if (fmt.fmt.pix.sizeimage < min)
+ fmt.fmt.pix.sizeimage = min;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ init_read(fmt.fmt.pix.sizeimage);
+ break;
+
+ case IO_METHOD_MMAP:
+ init_mmap();
+ break;
+
+ case IO_METHOD_USERPTR:
+ init_userp(fmt.fmt.pix.sizeimage);
+ break;
+ }
+}
+
+static void close_device(void)
+{
+ if (-1 == close(fd))
+ errno_exit("close");
+
+ fd = -1;
+}
+
+static void open_device(void)
+{
+ struct stat st;
+
+ if (-1 == stat(dev_name, &st)) {
+ fprintf(stderr, "Cannot identify '%s': %d, %s\n",
+ dev_name, errno, strerror(errno));
+ exit(EXIT_FAILURE);
+ }
+
+ if (!S_ISCHR(st.st_mode)) {
+ fprintf(stderr, "%s is no device\n", dev_name);
+ exit(EXIT_FAILURE);
+ }
+
+ fd = open(dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);
+
+ if (-1 == fd) {
+ fprintf(stderr, "Cannot open '%s': %d, %s\n",
+ dev_name, errno, strerror(errno));
+ exit(EXIT_FAILURE);
+ }
+}
+
+static void usage(FILE *fp, int argc, char **argv)
+{
+ fprintf(fp,
+ "Usage: %s [options]\n\n"
+ "Version 1.3\n"
+ "Options:\n"
+ "-d | --device name Video device name [%s]\n"
+ "-h | --help Print this message\n"
+ "-m | --mmap Use memory mapped buffers [default]\n"
+ "-r | --read Use read() calls\n"
+ "-u | --userp Use application allocated buffers\n"
+ "-o | --output Outputs stream to stdout\n"
+ "-f | --format Force format to 640x480 YUYV\n"
+ "-c | --count Number of frames to grab [%i]\n"
+ "",
+ argv[0], dev_name, frame_count);
+}
+
+static const char short_options[] = "d:hmruofc:";
+
+static const struct option
+long_options[] = {
+ { "device", required_argument, NULL, 'd' },
+ { "help", no_argument, NULL, 'h' },
+ { "mmap", no_argument, NULL, 'm' },
+ { "read", no_argument, NULL, 'r' },
+ { "userp", no_argument, NULL, 'u' },
+ { "output", no_argument, NULL, 'o' },
+ { "format", no_argument, NULL, 'f' },
+ { "count", required_argument, NULL, 'c' },
+ { 0, 0, 0, 0 }
+};
+
+int main(int argc, char **argv)
+{
+ dev_name = "/dev/video0";
+
+ for (;;) {
+ int idx;
+ int c;
+
+ c = getopt_long(argc, argv,
+ short_options, long_options, &idx);
+
+ if (-1 == c)
+ break;
+
+ switch (c) {
+ case 0: /* getopt_long() flag */
+ break;
+
+ case 'd':
+ dev_name = optarg;
+ break;
+
+ case 'h':
+ usage(stdout, argc, argv);
+ exit(EXIT_SUCCESS);
+
+ case 'm':
+ io = IO_METHOD_MMAP;
+ break;
+
+ case 'r':
+ io = IO_METHOD_READ;
+ break;
+
+ case 'u':
+ io = IO_METHOD_USERPTR;
+ break;
+
+ case 'o':
+ out_buf++;
+ break;
+
+ case 'f':
+ force_format++;
+ break;
+
+ case 'c':
+ errno = 0;
+ frame_count = strtol(optarg, NULL, 0);
+ if (errno)
+ errno_exit(optarg);
+ break;
+
+ default:
+ usage(stderr, argc, argv);
+ exit(EXIT_FAILURE);
+ }
+ }
+
+ open_device();
+ init_device();
+ start_capturing();
+ mainloop();
+ stop_capturing();
+ uninit_device();
+ close_device();
+ fprintf(stderr, "\n");
+ return 0;
+}
diff --git a/viewer/arvviewerresources.xml b/viewer/arvviewerresources.xml
index b8f1215c8..3d97312a4 100644
--- a/viewer/arvviewerresources.xml
+++ b/viewer/arvviewerresources.xml
@@ -4,6 +4,7 @@
<file>arv-viewer.ui</file>
<file>icons/gnome/scalable/devices/aravis-gigevision-symbolic.svg</file>
<file>icons/gnome/scalable/devices/aravis-usb3vision-symbolic.svg</file>
+ <file>icons/gnome/scalable/devices/aravis-v4l2-symbolic.svg</file>
<file>icons/gnome/scalable/devices/aravis-fake-symbolic.svg</file>
diff --git a/viewer/icons/gnome/scalable/devices/aravis-v4l2-symbolic.svg b/viewer/icons/gnome/scalable/devices/aravis-v4l2-symbolic.svg
new file mode 100644
index 000000000..64948f914
--- /dev/null
+++ b/viewer/icons/gnome/scalable/devices/aravis-v4l2-symbolic.svg
@@ -0,0 +1,177 @@