// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * V4L2 controls framework core implementation.
 */

#include <linux/export.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fwnode.h>

#include "v4l2-ctrls-priv.h"

static const union v4l2_ctrl_ptr ptr_null;

static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl,
		       u32 changes)
{
	memset(ev, 0, sizeof(*ev));
	ev->type = V4L2_EVENT_CTRL;
	ev->id = ctrl->id;
	ev->u.ctrl.changes = changes;
	ev->u.ctrl.type = ctrl->type;
	ev->u.ctrl.flags = user_flags(ctrl);
	if (ctrl->is_ptr)
		ev->u.ctrl.value64 = 0;
	else
		ev->u.ctrl.value64 = *ctrl->p_cur.p_s64;
	ev->u.ctrl.minimum = ctrl->minimum;
	ev->u.ctrl.maximum = ctrl->maximum;
	if (ctrl->type == V4L2_CTRL_TYPE_MENU
	    || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU)
		ev->u.ctrl.step = 1;
	else
		ev->u.ctrl.step = ctrl->step;
	ev->u.ctrl.default_value = ctrl->default_value;
}

void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl)
{
	struct v4l2_event ev;
	u32 changes = V4L2_EVENT_CTRL_CH_FLAGS;

	if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY))
		changes |= V4L2_EVENT_CTRL_CH_VALUE;
	fill_event(&ev, ctrl, changes);
	v4l2_event_queue_fh(fh, &ev);
}

void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes)
{
	struct v4l2_event ev;
	struct v4l2_subscribed_event *sev;

	if (list_empty(&ctrl->ev_subs))
		return;
	fill_event(&ev, ctrl, changes);

	list_for_each_entry(sev, &ctrl->ev_subs, node)
		if (sev->fh != fh ||
		    (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK))
			v4l2_event_queue_fh(sev->fh, &ev);
}

static bool std_equal(const struct v4l2_ctrl *ctrl, u32 idx,
		      union v4l2_ctrl_ptr ptr1,
		      union v4l2_ctrl_ptr ptr2)
{
	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_BUTTON:
		return false;
	case V4L2_CTRL_TYPE_STRING:
		idx *= ctrl->elem_size;
		/* strings are always 0-terminated */
		return !strcmp(ptr1.p_char + idx, ptr2.p_char + idx);
	case V4L2_CTRL_TYPE_INTEGER64:
		return ptr1.p_s64[idx] == ptr2.p_s64[idx];
	case V4L2_CTRL_TYPE_U8:
		return ptr1.p_u8[idx] == ptr2.p_u8[idx];
	case V4L2_CTRL_TYPE_U16:
		return ptr1.p_u16[idx] == ptr2.p_u16[idx];
	case V4L2_CTRL_TYPE_U32:
		return ptr1.p_u32[idx] == ptr2.p_u32[idx];
	default:
		if (ctrl->is_int)
			return ptr1.p_s32[idx] == ptr2.p_s32[idx];
		idx *= ctrl->elem_size;
		return !memcmp(ptr1.p_const + idx, ptr2.p_const + idx,
			       ctrl->elem_size);
	}
}

/* Default intra MPEG-2 quantisation coefficients, from the specification. */
static const u8 mpeg2_intra_quant_matrix[64] = {
	8,  16, 16, 19, 16, 19, 22, 22,
	22, 22, 22, 22, 26, 24, 26, 27,
	27, 27, 26, 26, 26, 26, 27, 27,
	27, 29, 29, 29, 34, 34, 34, 29,
	29, 29, 27, 27, 29, 29, 32, 32,
	34, 34, 37, 38, 37, 35, 35, 34,
	35, 38, 38, 40, 40, 40, 48, 48,
	46, 46, 56, 56, 58, 69, 69, 83
};

static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx,
			      union v4l2_ctrl_ptr ptr)
{
	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
	struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant;
	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
	struct v4l2_ctrl_fwht_params *p_fwht_params;
	void *p = ptr.p + idx * ctrl->elem_size;

	if (ctrl->p_def.p_const)
		memcpy(p, ctrl->p_def.p_const, ctrl->elem_size);
	else
		memset(p, 0, ctrl->elem_size);

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		p_mpeg2_sequence = p;

		/* 4:2:0 */
		p_mpeg2_sequence->chroma_format = 1;
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		p_mpeg2_picture = p;

		/* interlaced top field */
		p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
		p_mpeg2_picture->picture_coding_type =
					V4L2_MPEG2_PIC_CODING_TYPE_I;
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		p_mpeg2_quant = p;

		memcpy(p_mpeg2_quant->intra_quantiser_matrix,
		       mpeg2_intra_quant_matrix,
		       ARRAY_SIZE(mpeg2_intra_quant_matrix));
		/*
		 * The default non-intra MPEG-2 quantisation
		 * coefficients are all 16, as per the specification.
		 */
		memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16,
		       sizeof(p_mpeg2_quant->non_intra_quantiser_matrix));
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		p_vp8_frame = p;
		p_vp8_frame->num_dct_parts = 1;
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		p_fwht_params = p;
		p_fwht_params->version = V4L2_FWHT_VERSION;
		p_fwht_params->width = 1280;
		p_fwht_params->height = 720;
		p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV |
			(2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET);
		break;
	}
}

static void std_init(const struct v4l2_ctrl *ctrl, u32 idx,
		     union v4l2_ctrl_ptr ptr)
{
	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_STRING:
		idx *= ctrl->elem_size;
		memset(ptr.p_char + idx, ' ', ctrl->minimum);
		ptr.p_char[idx + ctrl->minimum] = '\0';
		break;
	case V4L2_CTRL_TYPE_INTEGER64:
		ptr.p_s64[idx] = ctrl->default_value;
		break;
	case V4L2_CTRL_TYPE_INTEGER:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_BITMASK:
	case V4L2_CTRL_TYPE_BOOLEAN:
		ptr.p_s32[idx] = ctrl->default_value;
		break;
	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		ptr.p_s32[idx] = 0;
		break;
	case V4L2_CTRL_TYPE_U8:
		ptr.p_u8[idx] = ctrl->default_value;
		break;
	case V4L2_CTRL_TYPE_U16:
		ptr.p_u16[idx] = ctrl->default_value;
		break;
	case V4L2_CTRL_TYPE_U32:
		ptr.p_u32[idx] = ctrl->default_value;
		break;
	default:
		std_init_compound(ctrl, idx, ptr);
		break;
	}
}

static void std_log(const struct v4l2_ctrl *ctrl)
{
	union v4l2_ctrl_ptr ptr = ctrl->p_cur;

	if (ctrl->is_array) {
		unsigned i;

		for (i = 0; i < ctrl->nr_of_dims; i++)
			pr_cont("[%u]", ctrl->dims[i]);
		return;
	}

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_INTEGER:
		pr_cont("%d", *ptr.p_s32);
		break;
	case V4L2_CTRL_TYPE_BOOLEAN:
		pr_cont("%s", *ptr.p_s32 ? "true" : "false");
		break;
	case V4L2_CTRL_TYPE_MENU:
		pr_cont("%s", ctrl->qmenu[*ptr.p_s32]);
		break;
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]);
		break;
	case V4L2_CTRL_TYPE_BITMASK:
		pr_cont("0x%08x", *ptr.p_s32);
		break;
	case V4L2_CTRL_TYPE_INTEGER64:
		pr_cont("%lld", *ptr.p_s64);
		break;
	case V4L2_CTRL_TYPE_STRING:
		pr_cont("%s", ptr.p_char);
		break;
	case V4L2_CTRL_TYPE_U8:
		pr_cont("%u", (unsigned)*ptr.p_u8);
		break;
	case V4L2_CTRL_TYPE_U16:
		pr_cont("%u", (unsigned)*ptr.p_u16);
		break;
	case V4L2_CTRL_TYPE_U32:
		pr_cont("%u", (unsigned)*ptr.p_u32);
		break;
	case V4L2_CTRL_TYPE_H264_SPS:
		pr_cont("H264_SPS");
		break;
	case V4L2_CTRL_TYPE_H264_PPS:
		pr_cont("H264_PPS");
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		pr_cont("H264_SCALING_MATRIX");
		break;
	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		pr_cont("H264_SLICE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		pr_cont("H264_DECODE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		pr_cont("H264_PRED_WEIGHTS");
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		pr_cont("FWHT_PARAMS");
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		pr_cont("VP8_FRAME");
		break;
	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		pr_cont("HDR10_CLL_INFO");
		break;
	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		pr_cont("HDR10_MASTERING_DISPLAY");
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		pr_cont("MPEG2_QUANTISATION");
		break;
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		pr_cont("MPEG2_SEQUENCE");
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		pr_cont("MPEG2_PICTURE");
		break;
	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		pr_cont("VP9_COMPRESSED_HDR");
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		pr_cont("VP9_FRAME");
		break;
	default:
		pr_cont("unknown type %d", ctrl->type);
		break;
	}
}

/*
 * Round towards the closest legal value. Be careful when we are
 * close to the maximum range of the control type to prevent
 * wrap-arounds.
 */
#define ROUND_TO_RANGE(val, offset_type, ctrl)			\
({								\
	offset_type offset;					\
	if ((ctrl)->maximum >= 0 &&				\
	    val >= (ctrl)->maximum - (s32)((ctrl)->step / 2))	\
		val = (ctrl)->maximum;				\
	else							\
		val += (s32)((ctrl)->step / 2);			\
	val = clamp_t(typeof(val), val,				\
		      (ctrl)->minimum, (ctrl)->maximum);	\
	offset = (val) - (ctrl)->minimum;			\
	offset = (ctrl)->step * (offset / (u32)(ctrl)->step);	\
	val = (ctrl)->minimum + offset;				\
	0;							\
})
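
/*
 * Worked example of the rounding above (illustrative only, not used by the
 * code): for a control with minimum 0, maximum 255 and step 4, a new value
 * of 9 becomes 8 and a value of 10 becomes 12, i.e. it is rounded to the
 * nearest multiple of the step counted from the minimum. Any value within
 * step/2 of the maximum snaps to the maximum itself, even if the maximum is
 * not aligned to the step.
 */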

/* Validate a new control */

#define zero_padding(s) \
	memset(&(s).padding, 0, sizeof((s).padding))
#define zero_reserved(s) \
	memset(&(s).reserved, 0, sizeof((s).reserved))

static int
validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf)
{
	unsigned int i;

	if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED |
			  V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE))
		return -EINVAL;

	/* Check that all values are in the accepted range. */
	if (lf->level > GENMASK(5, 0))
		return -EINVAL;

	if (lf->sharpness > GENMASK(2, 0))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++)
		if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63)
			return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++)
		if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63)
			return -EINVAL;

	zero_reserved(*lf);
	return 0;
}

static int
validate_vp9_quant_params(struct v4l2_vp9_quantization *quant)
{
	if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 ||
	    quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 ||
	    quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15)
		return -EINVAL;

	zero_reserved(*quant);
	return 0;
}

static int
validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg)
{
	unsigned int i, j;

	if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED |
			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP |
			   V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE |
			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA |
			   V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) {
		if (seg->feature_enabled[i] &
		    ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK)
			return -EINVAL;
	}

	for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) {
		const int range[] = { 255, 63, 3, 0 };

		for (j = 0; j < ARRAY_SIZE(seg->feature_data[j]); j++) {
			if (seg->feature_data[i][j] < -range[j] ||
			    seg->feature_data[i][j] > range[j])
				return -EINVAL;
		}
	}

	zero_reserved(*seg);
	return 0;
}

static int
validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr)
{
	if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT)
		return -EINVAL;

	return 0;
}

static int
validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame)
{
	int ret;

	/* Make sure we're not passed invalid flags. */
	if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			     V4L2_VP9_FRAME_FLAG_SHOW_FRAME |
			     V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT |
			     V4L2_VP9_FRAME_FLAG_INTRA_ONLY |
			     V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV |
			     V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX |
			     V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE |
			     V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
			     V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING |
			     V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING))
		return -EINVAL;

	if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT &&
	    frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX)
		return -EINVAL;

	if (frame->profile > V4L2_VP9_PROFILE_MAX)
		return -EINVAL;

	if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL)
		return -EINVAL;

	if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX)
		return -EINVAL;

	/*
	 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10
	 * and 12 bit depths.
	 */
	if ((frame->profile < 2 && frame->bit_depth != 8) ||
	    (frame->profile >= 2 &&
	     (frame->bit_depth != 10 && frame->bit_depth != 12)))
		return -EINVAL;

	/* Profile 0 and 2 only accept YUV 4:2:0. */
	if ((frame->profile == 0 || frame->profile == 2) &&
	    (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) ||
	     !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
		return -EINVAL;

	/* Profile 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */
	if ((frame->profile == 1 || frame->profile == 3) &&
	    ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) &&
	     (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
		return -EINVAL;

	if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE)
		return -EINVAL;

	/*
	 * According to the spec, tile_cols_log2 shall be less than or equal
	 * to 6.
	 */
	if (frame->tile_cols_log2 > 6)
		return -EINVAL;

	if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT)
		return -EINVAL;

	ret = validate_vp9_lf_params(&frame->lf);
	if (ret)
		return ret;

	ret = validate_vp9_quant_params(&frame->quant);
	if (ret)
		return ret;

	ret = validate_vp9_seg_params(&frame->seg);
	if (ret)
		return ret;

	zero_reserved(*frame);
	return 0;
}

/*
 * Compound controls validation requires setting unused fields/flags to zero
 * in order to properly detect unchanged controls with std_equal's memcmp.
 */
static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx,
				 union v4l2_ctrl_ptr ptr)
{
	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
	struct v4l2_ctrl_fwht_params *p_fwht_params;
	struct v4l2_ctrl_h264_sps *p_h264_sps;
	struct v4l2_ctrl_h264_pps *p_h264_pps;
	struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
	struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
	struct v4l2_ctrl_h264_decode_params *p_h264_dec_params;
	struct v4l2_ctrl_hevc_sps *p_hevc_sps;
	struct v4l2_ctrl_hevc_pps *p_hevc_pps;
	struct v4l2_ctrl_hevc_slice_params *p_hevc_slice_params;
	struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering;
	struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params;
	struct v4l2_area *area;
	void *p = ptr.p + idx * ctrl->elem_size;
	unsigned int i;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		p_mpeg2_sequence = p;

		switch (p_mpeg2_sequence->chroma_format) {
		case 1: /* 4:2:0 */
		case 2: /* 4:2:2 */
		case 3: /* 4:4:4 */
			break;
		default:
			return -EINVAL;
		}
		break;

	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		p_mpeg2_picture = p;

		switch (p_mpeg2_picture->intra_dc_precision) {
		case 0: /* 8 bits */
		case 1: /* 9 bits */
		case 2: /* 10 bits */
		case 3: /* 11 bits */
			break;
		default:
			return -EINVAL;
		}

		switch (p_mpeg2_picture->picture_structure) {
		case V4L2_MPEG2_PIC_TOP_FIELD:
		case V4L2_MPEG2_PIC_BOTTOM_FIELD:
		case V4L2_MPEG2_PIC_FRAME:
			break;
		default:
			return -EINVAL;
		}

		switch (p_mpeg2_picture->picture_coding_type) {
		case V4L2_MPEG2_PIC_CODING_TYPE_I:
		case V4L2_MPEG2_PIC_CODING_TYPE_P:
		case V4L2_MPEG2_PIC_CODING_TYPE_B:
			break;
		default:
			return -EINVAL;
		}

		zero_reserved(*p_mpeg2_picture);
		break;

	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		break;

	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		p_fwht_params = p;
		if (p_fwht_params->version < V4L2_FWHT_VERSION)
			return -EINVAL;
		if (!p_fwht_params->width || !p_fwht_params->height)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SPS:
		p_h264_sps = p;

		/* Some syntax elements are only conditionally valid */
		if (p_h264_sps->pic_order_cnt_type != 0) {
			p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
		} else if (p_h264_sps->pic_order_cnt_type != 1) {
			p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0;
			p_h264_sps->offset_for_non_ref_pic = 0;
			p_h264_sps->offset_for_top_to_bottom_field = 0;
			memset(&p_h264_sps->offset_for_ref_frame, 0,
			       sizeof(p_h264_sps->offset_for_ref_frame));
		}

		if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) {
			p_h264_sps->chroma_format_idc = 1;
			p_h264_sps->bit_depth_luma_minus8 = 0;
			p_h264_sps->bit_depth_chroma_minus8 = 0;

			p_h264_sps->flags &=
				~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;

			if (p_h264_sps->chroma_format_idc < 3)
				p_h264_sps->flags &=
					~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
		}

		if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY)
			p_h264_sps->flags &=
				~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;

		/*
		 * Chroma 4:2:2 format requires at least the High 4:2:2 profile.
		 *
		 * The H264 specification and well-known parser implementations
		 * use profile-idc values directly, as that is clearer and
		 * less ambiguous. We do the same here.
		 */
		if (p_h264_sps->profile_idc < 122 &&
		    p_h264_sps->chroma_format_idc > 1)
			return -EINVAL;
		/* Chroma 4:4:4 format requires at least the High 4:4:4 profile */
		if (p_h264_sps->profile_idc < 244 &&
		    p_h264_sps->chroma_format_idc > 2)
			return -EINVAL;
		if (p_h264_sps->chroma_format_idc > 3)
			return -EINVAL;

		if (p_h264_sps->bit_depth_luma_minus8 > 6)
			return -EINVAL;
		if (p_h264_sps->bit_depth_chroma_minus8 > 6)
			return -EINVAL;
		if (p_h264_sps->log2_max_frame_num_minus4 > 12)
			return -EINVAL;
		if (p_h264_sps->pic_order_cnt_type > 2)
			return -EINVAL;
		if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12)
			return -EINVAL;
		if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_PPS:
		p_h264_pps = p;

		if (p_h264_pps->num_slice_groups_minus1 > 7)
			return -EINVAL;
		if (p_h264_pps->num_ref_idx_l0_default_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_pps->num_ref_idx_l1_default_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_pps->weighted_bipred_idc > 2)
			return -EINVAL;
		/*
		 * pic_init_qp_minus26 shall be in the range of
		 * -(26 + QpBdOffset_y) to +25, inclusive,
		 * where QpBdOffset_y is 6 * bit_depth_luma_minus8
		 */
		if (p_h264_pps->pic_init_qp_minus26 < -62 ||
		    p_h264_pps->pic_init_qp_minus26 > 25)
			return -EINVAL;
		if (p_h264_pps->pic_init_qs_minus26 < -26 ||
		    p_h264_pps->pic_init_qs_minus26 > 25)
			return -EINVAL;
		if (p_h264_pps->chroma_qp_index_offset < -12 ||
		    p_h264_pps->chroma_qp_index_offset > 12)
			return -EINVAL;
		if (p_h264_pps->second_chroma_qp_index_offset < -12 ||
		    p_h264_pps->second_chroma_qp_index_offset > 12)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		break;

	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		p_h264_pred_weights = p;

		if (p_h264_pred_weights->luma_log2_weight_denom > 7)
			return -EINVAL;
		if (p_h264_pred_weights->chroma_log2_weight_denom > 7)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		p_h264_slice_params = p;

		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
			p_h264_slice_params->flags &=
				~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;

		if (p_h264_slice_params->colour_plane_id > 2)
			return -EINVAL;
		if (p_h264_slice_params->cabac_init_idc > 2)
			return -EINVAL;
		if (p_h264_slice_params->disable_deblocking_filter_idc > 2)
			return -EINVAL;
		if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 ||
		    p_h264_slice_params->slice_alpha_c0_offset_div2 > 6)
			return -EINVAL;
		if (p_h264_slice_params->slice_beta_offset_div2 < -6 ||
		    p_h264_slice_params->slice_beta_offset_div2 > 6)
			return -EINVAL;

		if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I ||
		    p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI)
			p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0;
		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
			p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0;

		if (p_h264_slice_params->num_ref_idx_l0_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_slice_params->num_ref_idx_l1_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		zero_reserved(*p_h264_slice_params);
		break;

	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		p_h264_dec_params = p;

		if (p_h264_dec_params->nal_ref_idc > 3)
			return -EINVAL;
		for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) {
			struct v4l2_h264_dpb_entry *dpb_entry =
				&p_h264_dec_params->dpb[i];

			zero_reserved(*dpb_entry);
		}
		zero_reserved(*p_h264_dec_params);
		break;

	case V4L2_CTRL_TYPE_VP8_FRAME:
		p_vp8_frame = p;

		switch (p_vp8_frame->num_dct_parts) {
		case 1:
		case 2:
		case 4:
		case 8:
			break;
		default:
			return -EINVAL;
		}
		zero_padding(p_vp8_frame->segment);
		zero_padding(p_vp8_frame->lf);
		zero_padding(p_vp8_frame->quant);
		zero_padding(p_vp8_frame->entropy);
		zero_padding(p_vp8_frame->coder_state);
		break;

	case V4L2_CTRL_TYPE_HEVC_SPS:
		p_hevc_sps = p;

		if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) {
			p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0;
			p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0;
			p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0;
			p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0;
		}

		if (!(p_hevc_sps->flags &
		      V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT))
			p_hevc_sps->num_long_term_ref_pics_sps = 0;
		break;

	case V4L2_CTRL_TYPE_HEVC_PPS:
		p_hevc_pps = p;

		if (!(p_hevc_pps->flags &
		      V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED))
			p_hevc_pps->diff_cu_qp_delta_depth = 0;

		if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) {
			p_hevc_pps->num_tile_columns_minus1 = 0;
			p_hevc_pps->num_tile_rows_minus1 = 0;
			memset(&p_hevc_pps->column_width_minus1, 0,
			       sizeof(p_hevc_pps->column_width_minus1));
			memset(&p_hevc_pps->row_height_minus1, 0,
			       sizeof(p_hevc_pps->row_height_minus1));

			p_hevc_pps->flags &=
				~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
		}

		if (p_hevc_pps->flags &
		    V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) {
			p_hevc_pps->pps_beta_offset_div2 = 0;
			p_hevc_pps->pps_tc_offset_div2 = 0;
		}

		zero_padding(*p_hevc_pps);
		break;

	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		p_hevc_decode_params = p;

		if (p_hevc_decode_params->num_active_dpb_entries >
		    V4L2_HEVC_DPB_ENTRIES_NUM_MAX)
			return -EINVAL;

		for (i = 0; i < p_hevc_decode_params->num_active_dpb_entries;
		     i++) {
			struct v4l2_hevc_dpb_entry *dpb_entry =
				&p_hevc_decode_params->dpb[i];

			zero_padding(*dpb_entry);
		}
		break;

	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		p_hevc_slice_params = p;

		zero_padding(p_hevc_slice_params->pred_weight_table);
		zero_padding(*p_hevc_slice_params);
		break;

	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		break;

	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		p_hdr10_mastering = p;

		for (i = 0; i < 3; ++i) {
			if (p_hdr10_mastering->display_primaries_x[i] <
				V4L2_HDR10_MASTERING_PRIMARIES_X_LOW ||
			    p_hdr10_mastering->display_primaries_x[i] >
				V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH ||
			    p_hdr10_mastering->display_primaries_y[i] <
				V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW ||
			    p_hdr10_mastering->display_primaries_y[i] >
				V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH)
				return -EINVAL;
		}

		if (p_hdr10_mastering->white_point_x <
			V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW ||
		    p_hdr10_mastering->white_point_x >
			V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH ||
		    p_hdr10_mastering->white_point_y <
			V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW ||
		    p_hdr10_mastering->white_point_y >
			V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH)
			return -EINVAL;

		if (p_hdr10_mastering->max_display_mastering_luminance <
			V4L2_HDR10_MASTERING_MAX_LUMA_LOW ||
		    p_hdr10_mastering->max_display_mastering_luminance >
			V4L2_HDR10_MASTERING_MAX_LUMA_HIGH ||
		    p_hdr10_mastering->min_display_mastering_luminance <
			V4L2_HDR10_MASTERING_MIN_LUMA_LOW ||
		    p_hdr10_mastering->min_display_mastering_luminance >
			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
			return -EINVAL;

		/* The following restriction comes from ITU-T Rec. H.265 spec */
		if (p_hdr10_mastering->max_display_mastering_luminance ==
			V4L2_HDR10_MASTERING_MAX_LUMA_LOW &&
		    p_hdr10_mastering->min_display_mastering_luminance ==
			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
			return -EINVAL;

		break;

	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		break;

	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		return validate_vp9_compressed_hdr(p);

	case V4L2_CTRL_TYPE_VP9_FRAME:
		return validate_vp9_frame(p);

	case V4L2_CTRL_TYPE_AREA:
		area = p;
		if (!area->width || !area->height)
			return -EINVAL;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

static int std_validate(const struct v4l2_ctrl *ctrl, u32 idx,
			union v4l2_ctrl_ptr ptr)
{
	size_t len;
	u64 offset;
	s64 val;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_INTEGER:
		return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl);
	case V4L2_CTRL_TYPE_INTEGER64:
		/*
		 * We can't use the ROUND_TO_RANGE define here due to
		 * the u64 divide that needs special care.
		 */
		val = ptr.p_s64[idx];
		if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2))
			val = ctrl->maximum;
		else
			val += (s64)(ctrl->step / 2);
		val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum);
		offset = val - ctrl->minimum;
		do_div(offset, ctrl->step);
		ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step;
		return 0;
	case V4L2_CTRL_TYPE_U8:
		return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl);
	case V4L2_CTRL_TYPE_U16:
		return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl);
	case V4L2_CTRL_TYPE_U32:
		return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl);

	case V4L2_CTRL_TYPE_BOOLEAN:
		ptr.p_s32[idx] = !!ptr.p_s32[idx];
		return 0;

	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum)
			return -ERANGE;
		if (ptr.p_s32[idx] < BITS_PER_LONG_LONG &&
		    (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx])))
			return -EINVAL;
		if (ctrl->type == V4L2_CTRL_TYPE_MENU &&
		    ctrl->qmenu[ptr.p_s32[idx]][0] == '\0')
			return -EINVAL;
		return 0;

	case V4L2_CTRL_TYPE_BITMASK:
		ptr.p_s32[idx] &= ctrl->maximum;
		return 0;

	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		ptr.p_s32[idx] = 0;
		return 0;

	case V4L2_CTRL_TYPE_STRING:
		idx *= ctrl->elem_size;
		len = strlen(ptr.p_char + idx);
		if (len < ctrl->minimum)
			return -ERANGE;
		if ((len - (u32)ctrl->minimum) % (u32)ctrl->step)
			return -ERANGE;
		return 0;

	default:
		return std_validate_compound(ctrl, idx, ptr);
	}
}

static const struct v4l2_ctrl_type_ops std_type_ops = {
	.equal = std_equal,
	.init = std_init,
	.log = std_log,
	.validate = std_validate,
};

void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv)
{
	if (!ctrl)
		return;
	if (!notify) {
		ctrl->call_notify = 0;
		return;
	}
	if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify))
		return;
	ctrl->handler->notify = notify;
	ctrl->handler->notify_priv = priv;
	ctrl->call_notify = 1;
}
EXPORT_SYMBOL(v4l2_ctrl_notify);
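
/*
 * Illustrative sketch, not part of this file: a bridge driver can mirror a
 * subdev control change into its own state by registering a notify callback.
 * "example_notify", "struct example_bridge" and "volume_ctrl" below are
 * hypothetical names used only for this sketch:
 *
 *	static void example_notify(struct v4l2_ctrl *ctrl, void *priv)
 *	{
 *		struct example_bridge *bridge = priv;
 *
 *		bridge->last_volume = ctrl->val;
 *	}
 *
 *	v4l2_ctrl_notify(volume_ctrl, example_notify, bridge);
 *
 * Passing a NULL notify function clears call_notify for that control again.
 */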

/* Copy the one value to another. */
static void ptr_to_ptr(struct v4l2_ctrl *ctrl,
		       union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to)
{
	if (ctrl == NULL)
		return;
	memcpy(to.p, from.p_const, ctrl->elems * ctrl->elem_size);
}

/* Copy the new value to the current value. */
void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags)
{
	bool changed;

	if (ctrl == NULL)
		return;

	/* has_changed is set by cluster_changed */
	changed = ctrl->has_changed;
	if (changed)
		ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur);

	if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) {
		/* Note: CH_FLAGS is only set for auto clusters. */
		ctrl->flags &=
			~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE);
		if (!is_cur_manual(ctrl->cluster[0])) {
			ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE;
			if (ctrl->cluster[0]->has_volatiles)
				ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;
		}
		fh = NULL;
	}
	if (changed || ch_flags) {
		/* If a control was changed that was not one of the controls
		   modified by the application, then send the event to all. */
		if (!ctrl->is_new)
			fh = NULL;
		send_event(fh, ctrl,
			(changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags);
		if (ctrl->call_notify && changed && ctrl->handler->notify)
			ctrl->handler->notify(ctrl, ctrl->handler->notify_priv);
	}
}

/* Copy the current value to the new value */
void cur_to_new(struct v4l2_ctrl *ctrl)
{
	if (ctrl == NULL)
		return;
	ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new);
}

/* Copy the new value to the request value */
void new_to_req(struct v4l2_ctrl_ref *ref)
{
	if (!ref)
		return;
	ptr_to_ptr(ref->ctrl, ref->ctrl->p_new, ref->p_req);
	ref->valid_p_req = true;
}

/* Copy the current value to the request value */
void cur_to_req(struct v4l2_ctrl_ref *ref)
{
	if (!ref)
		return;
	ptr_to_ptr(ref->ctrl, ref->ctrl->p_cur, ref->p_req);
	ref->valid_p_req = true;
}

/* Copy the request value to the new value */
void req_to_new(struct v4l2_ctrl_ref *ref)
{
	if (!ref)
		return;
	if (ref->valid_p_req)
		ptr_to_ptr(ref->ctrl, ref->p_req, ref->ctrl->p_new);
	else
		ptr_to_ptr(ref->ctrl, ref->ctrl->p_cur, ref->ctrl->p_new);
}

/* Control range checking */
int check_range(enum v4l2_ctrl_type type,
		s64 min, s64 max, u64 step, s64 def)
{
	switch (type) {
	case V4L2_CTRL_TYPE_BOOLEAN:
		if (step != 1 || max > 1 || min < 0)
			return -ERANGE;
		fallthrough;
	case V4L2_CTRL_TYPE_U8:
	case V4L2_CTRL_TYPE_U16:
	case V4L2_CTRL_TYPE_U32:
	case V4L2_CTRL_TYPE_INTEGER:
	case V4L2_CTRL_TYPE_INTEGER64:
		if (step == 0 || min > max || def < min || def > max)
			return -ERANGE;
		return 0;
	case V4L2_CTRL_TYPE_BITMASK:
		if (step || min || !max || (def & ~max))
			return -ERANGE;
		return 0;
	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		if (min > max || def < min || def > max)
			return -ERANGE;
		/* Note: step == menu_skip_mask for menu controls.
		   So here we check if the default value is masked out. */
		if (step && ((1 << def) & step))
			return -EINVAL;
		return 0;
	case V4L2_CTRL_TYPE_STRING:
		if (min > max || min < 0 || step < 1 || def)
			return -ERANGE;
		return 0;
	default:
		return 0;
	}
}

/* Validate a new control */
int validate_new(const struct v4l2_ctrl *ctrl, union v4l2_ctrl_ptr p_new)
{
	unsigned idx;
	int err = 0;

	for (idx = 0; !err && idx < ctrl->elems; idx++)
		err = ctrl->type_ops->validate(ctrl, idx, p_new);
	return err;
}

/* Set the handler's error code if it wasn't set earlier already */
static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err)
{
	if (hdl->error == 0)
		hdl->error = err;
	return err;
}

/* Initialize the handler */
int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl,
				 unsigned nr_of_controls_hint,
				 struct lock_class_key *key, const char *name)
{
	mutex_init(&hdl->_lock);
	hdl->lock = &hdl->_lock;
	lockdep_set_class_and_name(hdl->lock, key, name);
	INIT_LIST_HEAD(&hdl->ctrls);
	INIT_LIST_HEAD(&hdl->ctrl_refs);
	hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8;
	hdl->buckets = kvmalloc_array(hdl->nr_of_buckets,
				      sizeof(hdl->buckets[0]),
				      GFP_KERNEL | __GFP_ZERO);
	hdl->error = hdl->buckets ? 0 : -ENOMEM;
	v4l2_ctrl_handler_init_request(hdl);
	return hdl->error;
}
EXPORT_SYMBOL(v4l2_ctrl_handler_init_class);
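
/*
 * Illustrative sketch, not part of the framework: typical driver-side use of
 * a handler. v4l2_ctrl_handler_init() is the convenience macro around
 * v4l2_ctrl_handler_init_class() that supplies the lock_class_key. The
 * "example_*" names and the control ranges below are assumptions made only
 * for this sketch.
 */
static int __maybe_unused example_init_controls(struct v4l2_ctrl_handler *hdl,
						const struct v4l2_ctrl_ops *ops)
{
	v4l2_ctrl_handler_init(hdl, 2);
	v4l2_ctrl_new_std(hdl, ops, V4L2_CID_BRIGHTNESS, 0, 255, 1, 128);
	v4l2_ctrl_new_std(hdl, ops, V4L2_CID_CONTRAST, 0, 255, 1, 64);

	/* The new_* helpers never return an error code; check hdl->error. */
	if (hdl->error) {
		int err = hdl->error;

		v4l2_ctrl_handler_free(hdl);
		return err;
	}

	/* Write the default values to the hardware via s_ctrl. */
	return v4l2_ctrl_handler_setup(hdl);
}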

/* Free all controls and control refs */
void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl)
{
	struct v4l2_ctrl_ref *ref, *next_ref;
	struct v4l2_ctrl *ctrl, *next_ctrl;
	struct v4l2_subscribed_event *sev, *next_sev;

	if (hdl == NULL || hdl->buckets == NULL)
		return;

	v4l2_ctrl_handler_free_request(hdl);

	mutex_lock(hdl->lock);
	/* Free all nodes */
	list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) {
		list_del(&ref->node);
		kfree(ref);
	}
	/* Free all controls owned by the handler */
	list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) {
		list_del(&ctrl->node);
		list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node)
			list_del(&sev->node);
		kvfree(ctrl);
	}
	kvfree(hdl->buckets);
	hdl->buckets = NULL;
	hdl->cached = NULL;
	hdl->error = 0;
	mutex_unlock(hdl->lock);
	mutex_destroy(&hdl->_lock);
}
EXPORT_SYMBOL(v4l2_ctrl_handler_free);

/* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer
   be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing
   with applications that do not use the NEXT_CTRL flag.

   We just find the n-th private user control. It's O(N), but that should not
   be an issue in this particular case. */
static struct v4l2_ctrl_ref *find_private_ref(
		struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref;

	id -= V4L2_CID_PRIVATE_BASE;
	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
		/* Search for private user controls that are compatible with
		   VIDIOC_G/S_CTRL. */
		if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER &&
		    V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) {
			if (!ref->ctrl->is_int)
				continue;
			if (id == 0)
				return ref;
			id--;
		}
	}
	return NULL;
}

/* Find a control with the given ID. */
struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref;
	int bucket;

	id &= V4L2_CTRL_ID_MASK;

	/* Old-style private controls need special handling */
	if (id >= V4L2_CID_PRIVATE_BASE)
		return find_private_ref(hdl, id);
	bucket = id % hdl->nr_of_buckets;

	/* Simple optimization: cache the last control found */
	if (hdl->cached && hdl->cached->ctrl->id == id)
		return hdl->cached;

	/* Not in cache, search the hash */
	ref = hdl->buckets ? hdl->buckets[bucket] : NULL;
	while (ref && ref->ctrl->id != id)
		ref = ref->next;

	if (ref)
		hdl->cached = ref; /* cache it! */
	return ref;
}

/* Find a control with the given ID. Take the handler's lock first. */
struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref = NULL;

	if (hdl) {
		mutex_lock(hdl->lock);
		ref = find_ref(hdl, id);
		mutex_unlock(hdl->lock);
	}
	return ref;
}

/* Find a control with the given ID. */
struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id);

	return ref ? ref->ctrl : NULL;
}
EXPORT_SYMBOL(v4l2_ctrl_find);
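
/*
 * Illustrative sketch, not part of this file: looking up a control by ID and
 * setting it from kernel space. v4l2_ctrl_s_ctrl() is provided elsewhere in
 * the control framework and takes the handler lock itself; "example_set_gain"
 * is a hypothetical helper.
 */
static int __maybe_unused example_set_gain(struct v4l2_ctrl_handler *hdl, s32 gain)
{
	struct v4l2_ctrl *ctrl = v4l2_ctrl_find(hdl, V4L2_CID_GAIN);

	if (!ctrl)
		return -ENOENT;
	return v4l2_ctrl_s_ctrl(ctrl, gain);
}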

/* Allocate a new v4l2_ctrl_ref and hook it into the handler. */
int handler_new_ref(struct v4l2_ctrl_handler *hdl,
		    struct v4l2_ctrl *ctrl,
		    struct v4l2_ctrl_ref **ctrl_ref,
		    bool from_other_dev, bool allocate_req)
{
	struct v4l2_ctrl_ref *ref;
	struct v4l2_ctrl_ref *new_ref;
	u32 id = ctrl->id;
	u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1;
	int bucket = id % hdl->nr_of_buckets;	/* which bucket to use */
	unsigned int size_extra_req = 0;

	if (ctrl_ref)
		*ctrl_ref = NULL;

	/*
	 * Automatically add the control class if it is not yet present and
	 * the new control is not a compound control.
	 */
	if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES &&
	    id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL)
		if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0))
			return hdl->error;

	if (hdl->error)
		return hdl->error;

	if (allocate_req)
		size_extra_req = ctrl->elems * ctrl->elem_size;
	new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL);
	if (!new_ref)
		return handler_set_err(hdl, -ENOMEM);
	new_ref->ctrl = ctrl;
	new_ref->from_other_dev = from_other_dev;
	if (size_extra_req)
		new_ref->p_req.p = &new_ref[1];

	INIT_LIST_HEAD(&new_ref->node);

	mutex_lock(hdl->lock);

	/* Add immediately at the end of the list if the list is empty, or if
	   the last element in the list has a lower ID.
	   This ensures that when elements are added in ascending order the
	   insertion is an O(1) operation. */
	if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) {
		list_add_tail(&new_ref->node, &hdl->ctrl_refs);
		goto insert_in_hash;
	}

	/* Find insert position in sorted list */
	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
		if (ref->ctrl->id < id)
			continue;
		/* Don't add duplicates */
		if (ref->ctrl->id == id) {
			kfree(new_ref);
			goto unlock;
		}
		list_add(&new_ref->node, ref->node.prev);
		break;
	}

insert_in_hash:
	/* Insert the control node in the hash */
	new_ref->next = hdl->buckets[bucket];
	hdl->buckets[bucket] = new_ref;
	if (ctrl_ref)
		*ctrl_ref = new_ref;
	if (ctrl->handler == hdl) {
		/* By default each control starts in a cluster of its own.
		 * new_ref->ctrl is basically a cluster array with one
		 * element, so that's perfect to use as the cluster pointer.
		 * But only do this for the handler that owns the control.
		 */
		ctrl->cluster = &new_ref->ctrl;
		ctrl->ncontrols = 1;
	}

unlock:
	mutex_unlock(hdl->lock);
	return 0;
}

/* Add a new control */
static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			const struct v4l2_ctrl_type_ops *type_ops,
			u32 id, const char *name, enum v4l2_ctrl_type type,
			s64 min, s64 max, u64 step, s64 def,
			const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size,
			u32 flags, const char * const *qmenu,
			const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def,
			void *priv)
{
	struct v4l2_ctrl *ctrl;
	unsigned sz_extra;
	unsigned nr_of_dims = 0;
	unsigned elems = 1;
	bool is_array;
	unsigned tot_ctrl_size;
	unsigned idx;
	void *data;
	int err;

	if (hdl->error)
		return NULL;

	while (dims && dims[nr_of_dims]) {
		elems *= dims[nr_of_dims];
		nr_of_dims++;
		if (nr_of_dims == V4L2_CTRL_MAX_DIMS)
			break;
	}
	is_array = nr_of_dims > 0;

	/* Prefill elem_size for all types handled by std_type_ops */
	switch ((u32)type) {
	case V4L2_CTRL_TYPE_INTEGER64:
		elem_size = sizeof(s64);
		break;
	case V4L2_CTRL_TYPE_STRING:
		elem_size = max + 1;
		break;
	case V4L2_CTRL_TYPE_U8:
		elem_size = sizeof(u8);
		break;
	case V4L2_CTRL_TYPE_U16:
		elem_size = sizeof(u16);
		break;
	case V4L2_CTRL_TYPE_U32:
		elem_size = sizeof(u32);
		break;
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence);
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture);
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation);
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_fwht_params);
		break;
	case V4L2_CTRL_TYPE_H264_SPS:
		elem_size = sizeof(struct v4l2_ctrl_h264_sps);
		break;
	case V4L2_CTRL_TYPE_H264_PPS:
		elem_size = sizeof(struct v4l2_ctrl_h264_pps);
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix);
		break;
	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_h264_slice_params);
		break;
	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_h264_decode_params);
		break;
	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights);
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		elem_size = sizeof(struct v4l2_ctrl_vp8_frame);
		break;
	case V4L2_CTRL_TYPE_HEVC_SPS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_sps);
		break;
	case V4L2_CTRL_TYPE_HEVC_PPS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_pps);
		break;
	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params);
		break;
	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix);
		break;
	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params);
		break;
	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info);
		break;
	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display);
		break;
	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr);
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		elem_size = sizeof(struct v4l2_ctrl_vp9_frame);
		break;
	case V4L2_CTRL_TYPE_AREA:
		elem_size = sizeof(struct v4l2_area);
		break;
	default:
		if (type < V4L2_CTRL_COMPOUND_TYPES)
			elem_size = sizeof(s32);
		break;
	}
	tot_ctrl_size = elem_size * elems;

	/* Sanity checks */
	if (id == 0 || name == NULL || !elem_size ||
	    id >= V4L2_CID_PRIVATE_BASE ||
	    (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) ||
	    (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) {
		handler_set_err(hdl, -ERANGE);
		return NULL;
	}
	err = check_range(type, min, max, step, def);
	if (err) {
		handler_set_err(hdl, err);
		return NULL;
	}
	if (is_array &&
	    (type == V4L2_CTRL_TYPE_BUTTON ||
	     type == V4L2_CTRL_TYPE_CTRL_CLASS)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	sz_extra = 0;
	if (type == V4L2_CTRL_TYPE_BUTTON)
		flags |= V4L2_CTRL_FLAG_WRITE_ONLY |
			V4L2_CTRL_FLAG_EXECUTE_ON_WRITE;
	else if (type == V4L2_CTRL_TYPE_CTRL_CLASS)
		flags |= V4L2_CTRL_FLAG_READ_ONLY;
	else if (type == V4L2_CTRL_TYPE_INTEGER64 ||
		 type == V4L2_CTRL_TYPE_STRING ||
		 type >= V4L2_CTRL_COMPOUND_TYPES ||
		 is_array)
		sz_extra += 2 * tot_ctrl_size;

	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const)
		sz_extra += elem_size;

	ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL);
	if (ctrl == NULL) {
		handler_set_err(hdl, -ENOMEM);
		return NULL;
	}

	INIT_LIST_HEAD(&ctrl->node);
	INIT_LIST_HEAD(&ctrl->ev_subs);
	ctrl->handler = hdl;
	ctrl->ops = ops;
	ctrl->type_ops = type_ops ? type_ops : &std_type_ops;
	ctrl->id = id;
	ctrl->name = name;
	ctrl->type = type;
	ctrl->flags = flags;
	ctrl->minimum = min;
	ctrl->maximum = max;
	ctrl->step = step;
	ctrl->default_value = def;
	ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING;
	ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string;
	ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64;
	ctrl->is_array = is_array;
	ctrl->elems = elems;
	ctrl->nr_of_dims = nr_of_dims;
	if (nr_of_dims)
		memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0]));
	ctrl->elem_size = elem_size;
	if (type == V4L2_CTRL_TYPE_MENU)
		ctrl->qmenu = qmenu;
	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
		ctrl->qmenu_int = qmenu_int;
	ctrl->priv = priv;
	ctrl->cur.val = ctrl->val = def;
	data = &ctrl[1];

	if (!ctrl->is_int) {
		ctrl->p_new.p = data;
		ctrl->p_cur.p = data + tot_ctrl_size;
	} else {
		ctrl->p_new.p = &ctrl->val;
		ctrl->p_cur.p = &ctrl->cur.val;
	}

	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) {
		ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size;
		memcpy(ctrl->p_def.p, p_def.p_const, elem_size);
	}

	for (idx = 0; idx < elems; idx++) {
		ctrl->type_ops->init(ctrl, idx, ctrl->p_cur);
		ctrl->type_ops->init(ctrl, idx, ctrl->p_new);
	}

	if (handler_new_ref(hdl, ctrl, NULL, false, false)) {
		kvfree(ctrl);
		return NULL;
	}
	mutex_lock(hdl->lock);
	list_add_tail(&ctrl->node, &hdl->ctrls);
	mutex_unlock(hdl->lock);
	return ctrl;
}

struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl,
				       const struct v4l2_ctrl_config *cfg, void *priv)
{
	bool is_menu;
	struct v4l2_ctrl *ctrl;
	const char *name = cfg->name;
	const char * const *qmenu = cfg->qmenu;
	const s64 *qmenu_int = cfg->qmenu_int;
	enum v4l2_ctrl_type type = cfg->type;
	u32 flags = cfg->flags;
	s64 min = cfg->min;
	s64 max = cfg->max;
	u64 step = cfg->step;
	s64 def = cfg->def;

	if (name == NULL)
		v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step,
			       &def, &flags);

	is_menu = (type == V4L2_CTRL_TYPE_MENU ||
		   type == V4L2_CTRL_TYPE_INTEGER_MENU);
	if (is_menu)
		WARN_ON(step);
	else
		WARN_ON(cfg->menu_skip_mask);
	if (type == V4L2_CTRL_TYPE_MENU && !qmenu) {
		qmenu = v4l2_ctrl_get_menu(cfg->id);
	} else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name,
			     type, min, max,
			     is_menu ? cfg->menu_skip_mask : step, def,
			     cfg->dims, cfg->elem_size,
			     flags, qmenu, qmenu_int, cfg->p_def, priv);
	if (ctrl)
		ctrl->is_private = cfg->is_private;
	return ctrl;
}
EXPORT_SYMBOL(v4l2_ctrl_new_custom);

/* Helper function for standard non-menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl,
				    const struct v4l2_ctrl_ops *ops,
				    u32 id, s64 min, s64 max, u64 step, s64 def)
{
	const char *name;
	enum v4l2_ctrl_type type;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type == V4L2_CTRL_TYPE_MENU ||
	    type == V4L2_CTRL_TYPE_INTEGER_MENU ||
	    type >= V4L2_CTRL_COMPOUND_TYPES) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     min, max, step, def, NULL, 0,
			     flags, NULL, NULL, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std);

/* Helper function for standard menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl,
					 const struct v4l2_ctrl_ops *ops,
					 u32 id, u8 _max, u64 mask, u8 _def)
{
	const char * const *qmenu = NULL;
	const s64 *qmenu_int = NULL;
	unsigned int qmenu_int_len = 0;
	const char *name;
	enum v4l2_ctrl_type type;
	s64 min;
	s64 max = _max;
	u64 step;
	s64 def = _def;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);

	if (type == V4L2_CTRL_TYPE_MENU)
		qmenu = v4l2_ctrl_get_menu(id);
	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
		qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len);

	if ((!qmenu && !qmenu_int) || (qmenu_int && max > qmenu_int_len)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, mask, def, NULL, 0,
			     flags, qmenu, qmenu_int, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_menu);

/* Helper function for standard menu controls with driver defined menu */
struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops, u32 id, u8 _max,
			u64 mask, u8 _def, const char * const *qmenu)
{
	enum v4l2_ctrl_type type;
	const char *name;
	u32 flags;
	u64 step;
	s64 min;
	s64 max = _max;
	s64 def = _def;

	/* v4l2_ctrl_new_std_menu_items() should only be called for
	 * standard controls without a standard menu.
	 */
	if (v4l2_ctrl_get_menu(id)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, mask, def, NULL, 0,
			     flags, qmenu, NULL, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items);

/* Helper function for standard compound controls */
struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl,
				const struct v4l2_ctrl_ops *ops, u32 id,
				const union v4l2_ctrl_ptr p_def)
{
	const char *name;
	enum v4l2_ctrl_type type;
	u32 flags;
	s64 min, max, step, def;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type < V4L2_CTRL_COMPOUND_TYPES) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     min, max, step, def, NULL, 0,
			     flags, NULL, NULL, p_def, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_compound);

/* Helper function for standard integer menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl,
					 const struct v4l2_ctrl_ops *ops,
					 u32 id, u8 _max, u8 _def, const s64 *qmenu_int)
{
	const char *name;
	enum v4l2_ctrl_type type;
	s64 min;
	u64 step;
	s64 max = _max;
	s64 def = _def;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type != V4L2_CTRL_TYPE_INTEGER_MENU) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, 0, def, NULL, 0,
			     flags, NULL, qmenu_int, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_int_menu);

/* Add the controls from another handler to our own. */
int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl,
			  struct v4l2_ctrl_handler *add,
			  bool (*filter)(const struct v4l2_ctrl *ctrl),
			  bool from_other_dev)
{
	struct v4l2_ctrl_ref *ref;
	int ret = 0;

	/* Do nothing if either handler is NULL or if they are the same */
	if (!hdl || !add || hdl == add)
		return 0;
	if (hdl->error)
		return hdl->error;
	mutex_lock(add->lock);
	list_for_each_entry(ref, &add->ctrl_refs, node) {
		struct v4l2_ctrl *ctrl = ref->ctrl;

		/* Skip handler-private controls. */
		if (ctrl->is_private)
			continue;
		/* And control classes */
		if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
			continue;
		/* Filter any unwanted controls */
		if (filter && !filter(ctrl))
			continue;
		ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false);
		if (ret)
			break;
	}
	mutex_unlock(add->lock);
	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_add_handler);
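
/*
 * Illustrative sketch, not part of this file: a bridge driver typically
 * inherits the controls of its subdevices and, for radio device nodes, uses
 * v4l2_ctrl_radio_filter() below to pick up only the radio-relevant ones.
 * "bridge_hdl" and "sd" are hypothetical driver variables:
 *
 *	ret = v4l2_ctrl_add_handler(bridge_hdl, sd->ctrl_handler,
 *				    v4l2_ctrl_radio_filter, true);
 */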

bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl)
{
	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX)
		return true;
	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX)
		return true;
	switch (ctrl->id) {
	case V4L2_CID_AUDIO_MUTE:
	case V4L2_CID_AUDIO_VOLUME:
	case V4L2_CID_AUDIO_BALANCE:
	case V4L2_CID_AUDIO_BASS:
	case V4L2_CID_AUDIO_TREBLE:
	case V4L2_CID_AUDIO_LOUDNESS:
		return true;
	default:
		break;
	}
	return false;
}
EXPORT_SYMBOL(v4l2_ctrl_radio_filter);

/* Cluster controls */
void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls)
{
	bool has_volatiles = false;
	int i;

	/* The first control is the master control and it must not be NULL */
	if (WARN_ON(ncontrols == 0 || controls[0] == NULL))
		return;

	for (i = 0; i < ncontrols; i++) {
		if (controls[i]) {
			controls[i]->cluster = controls;
			controls[i]->ncontrols = ncontrols;
			if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE)
				has_volatiles = true;
		}
	}
	controls[0]->has_volatiles = has_volatiles;
}
EXPORT_SYMBOL(v4l2_ctrl_cluster);
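
/*
 * Illustrative sketch, not part of the framework: a hypothetical driver state
 * ("struct example_flip_state") keeps the two flip control pointers adjacent
 * so they can be clustered; the cluster array must stay valid for the
 * lifetime of the controls, which is why it lives in the state struct rather
 * than on the stack.
 */
struct example_flip_state {
	struct v4l2_ctrl_handler hdl;
	/* cluster: order and adjacency matter, hflip is the master */
	struct v4l2_ctrl *hflip;
	struct v4l2_ctrl *vflip;
};

static int __maybe_unused example_add_flip_cluster(struct example_flip_state *st,
						   const struct v4l2_ctrl_ops *ops)
{
	st->hflip = v4l2_ctrl_new_std(&st->hdl, ops, V4L2_CID_HFLIP, 0, 1, 1, 0);
	st->vflip = v4l2_ctrl_new_std(&st->hdl, ops, V4L2_CID_VFLIP, 0, 1, 1, 0);
	if (st->hdl.error)
		return st->hdl.error;

	/* From now on s_ctrl sees both controls with consistent new values. */
	v4l2_ctrl_cluster(2, &st->hflip);
	return 0;
}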

void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls,
			    u8 manual_val, bool set_volatile)
{
	struct v4l2_ctrl *master = controls[0];
	u32 flag = 0;
	int i;

	v4l2_ctrl_cluster(ncontrols, controls);
	WARN_ON(ncontrols <= 1);
	WARN_ON(manual_val < master->minimum || manual_val > master->maximum);
	WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl));
	master->is_auto = true;
	master->has_volatiles = set_volatile;
	master->manual_mode_value = manual_val;
	master->flags |= V4L2_CTRL_FLAG_UPDATE;

	if (!is_cur_manual(master))
		flag = V4L2_CTRL_FLAG_INACTIVE |
			(set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0);

	for (i = 1; i < ncontrols; i++)
		if (controls[i])
			controls[i]->flags |= flag;
}
EXPORT_SYMBOL(v4l2_ctrl_auto_cluster);
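
/*
 * Illustrative sketch, not part of the framework: a typical auto-exposure
 * cluster. The auto control must be the first (master) entry; passing 'true'
 * as the last argument marks the manual control volatile while autoexposure
 * is active, which requires the ops to implement g_volatile_ctrl. The
 * "example_*" names and exposure range are assumptions for this sketch only.
 */
struct example_exposure_state {
	struct v4l2_ctrl_handler hdl;
	/* auto cluster: the auto control comes first */
	struct v4l2_ctrl *auto_exp;
	struct v4l2_ctrl *exposure;
};

static int __maybe_unused
example_add_exposure_cluster(struct example_exposure_state *st,
			     const struct v4l2_ctrl_ops *ops)
{
	st->auto_exp = v4l2_ctrl_new_std_menu(&st->hdl, ops,
					      V4L2_CID_EXPOSURE_AUTO,
					      V4L2_EXPOSURE_MANUAL, 0,
					      V4L2_EXPOSURE_AUTO);
	st->exposure = v4l2_ctrl_new_std(&st->hdl, ops,
					 V4L2_CID_EXPOSURE_ABSOLUTE,
					 1, 10000, 1, 100);
	if (st->hdl.error)
		return st->hdl.error;

	v4l2_ctrl_auto_cluster(2, &st->auto_exp, V4L2_EXPOSURE_MANUAL, true);
	return 0;
}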

/*
 * Obtain the current volatile values of an autocluster and mark them
 * as new.
 */
void update_from_auto_cluster(struct v4l2_ctrl *master)
{
	int i;

	for (i = 1; i < master->ncontrols; i++)
		cur_to_new(master->cluster[i]);
	if (!call_op(master, g_volatile_ctrl))
		for (i = 1; i < master->ncontrols; i++)
			if (master->cluster[i])
				master->cluster[i]->is_new = 1;
}

/*
 * Return non-zero if one or more of the controls in the cluster has a new
 * value that differs from the current value.
 */
static int cluster_changed(struct v4l2_ctrl *master)
{
	bool changed = false;
	unsigned int idx;
	int i;

	for (i = 0; i < master->ncontrols; i++) {
		struct v4l2_ctrl *ctrl = master->cluster[i];
		bool ctrl_changed = false;

		if (!ctrl)
			continue;

		if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) {
			changed = true;
			ctrl_changed = true;
		}

		/*
		 * Set has_changed to false to avoid generating
		 * the event V4L2_EVENT_CTRL_CH_VALUE
		 */
		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) {
			ctrl->has_changed = false;
			continue;
		}

		for (idx = 0; !ctrl_changed && idx < ctrl->elems; idx++)
			ctrl_changed = !ctrl->type_ops->equal(ctrl, idx,
				ctrl->p_cur, ctrl->p_new);
		ctrl->has_changed = ctrl_changed;
		changed |= ctrl->has_changed;
	}
	return changed;
}

/*
 * Core function that calls try/s_ctrl and ensures that the new value is
 * copied to the current value on a set.
 * Must be called with ctrl->handler->lock held.
 */
int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master,
		       bool set, u32 ch_flags)
{
	bool update_flag;
	int ret;
	int i;

	/*
	 * Go through the cluster and either validate the new value or
	 * (if no new value was set), copy the current value to the new
	 * value, ensuring a consistent view for the control ops when
	 * called.
	 */
	for (i = 0; i < master->ncontrols; i++) {
		struct v4l2_ctrl *ctrl = master->cluster[i];

		if (!ctrl)
			continue;

		if (!ctrl->is_new) {
			cur_to_new(ctrl);
			continue;
		}
		/*
		 * Check again: it may have changed since the
		 * previous check in try_or_set_ext_ctrls().
		 */
		if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED))
			return -EBUSY;
	}

	ret = call_op(master, try_ctrl);

	/* Don't set if there is no change */
	if (ret || !set || !cluster_changed(master))
		return ret;
	ret = call_op(master, s_ctrl);
	if (ret)
		return ret;

	/* If OK, then make the new values permanent. */
	update_flag = is_cur_manual(master) != is_new_manual(master);

	for (i = 0; i < master->ncontrols; i++) {
		/*
		 * If we switch from auto to manual mode, and this cluster
		 * contains volatile controls, then all non-master controls
		 * have to be marked as changed. The 'new' value contains
		 * the volatile value (obtained by update_from_auto_cluster),
		 * which now has to become the current value.
		 */
		if (i && update_flag && is_new_manual(master) &&
		    master->has_volatiles && master->cluster[i])
			master->cluster[i]->has_changed = true;

		new_to_cur(fh, master->cluster[i], ch_flags |
			((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0));
	}
	return 0;
}

/* Activate/deactivate a control. */
void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active)
{
	/* invert since the actual flag is called 'inactive' */
	bool inactive = !active;
	bool old;

	if (ctrl == NULL)
		return;

	if (inactive)
		/* set V4L2_CTRL_FLAG_INACTIVE */
		old = test_and_set_bit(4, &ctrl->flags);
	else
		/* clear V4L2_CTRL_FLAG_INACTIVE */
		old = test_and_clear_bit(4, &ctrl->flags);
	if (old != inactive)
		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
}
EXPORT_SYMBOL(v4l2_ctrl_activate);

void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed)
{
	bool old;

	if (ctrl == NULL)
		return;

	lockdep_assert_held(ctrl->handler->lock);

	if (grabbed)
		/* set V4L2_CTRL_FLAG_GRABBED */
		old = test_and_set_bit(1, &ctrl->flags);
	else
		/* clear V4L2_CTRL_FLAG_GRABBED */
		old = test_and_clear_bit(1, &ctrl->flags);
	if (old != grabbed)
		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
}
EXPORT_SYMBOL(__v4l2_ctrl_grab);

/* Call s_ctrl for all controls owned by the handler */
int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
{
	struct v4l2_ctrl *ctrl;
	int ret = 0;

	if (hdl == NULL)
		return 0;

	lockdep_assert_held(hdl->lock);

	list_for_each_entry(ctrl, &hdl->ctrls, node)
		ctrl->done = false;

	list_for_each_entry(ctrl, &hdl->ctrls, node) {
		struct v4l2_ctrl *master = ctrl->cluster[0];
		int i;

		/* Skip if this control was already handled by a cluster. */
		/* Skip button controls and read-only controls. */
		if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON ||
		    (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY))
			continue;

		for (i = 0; i < master->ncontrols; i++) {
			if (master->cluster[i]) {
				cur_to_new(master->cluster[i]);
				master->cluster[i]->is_new = 1;
				master->cluster[i]->done = true;
			}
		}
		ret = call_op(master, s_ctrl);
		if (ret)
			break;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup);

int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
{
	int ret;

	if (hdl == NULL)
		return 0;

	mutex_lock(hdl->lock);
	ret = __v4l2_ctrl_handler_setup(hdl);
	mutex_unlock(hdl->lock);

	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_handler_setup);

/* Log the control name and value */
static void log_ctrl(const struct v4l2_ctrl *ctrl,
		     const char *prefix, const char *colon)
{
	if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY))
		return;
	if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
		return;

	pr_info("%s%s%s: ", prefix, colon, ctrl->name);

	ctrl->type_ops->log(ctrl);

	if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE |
			   V4L2_CTRL_FLAG_GRABBED |
			   V4L2_CTRL_FLAG_VOLATILE)) {
		if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
			pr_cont(" inactive");
		if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)
			pr_cont(" grabbed");
		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE)
			pr_cont(" volatile");
	}
	pr_cont("\n");
}

/* Log all controls owned by the handler */
void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl,
				  const char *prefix)
{
	struct v4l2_ctrl *ctrl;
	const char *colon = "";
	int len;

	if (!hdl)
		return;
	if (!prefix)
		prefix = "";
	len = strlen(prefix);
	if (len && prefix[len - 1] != ' ')
		colon = ": ";
	mutex_lock(hdl->lock);
	list_for_each_entry(ctrl, &hdl->ctrls, node)
		if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED))
			log_ctrl(ctrl, prefix, colon);
	mutex_unlock(hdl->lock);
}
EXPORT_SYMBOL(v4l2_ctrl_handler_log_status);
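
/*
 * Illustrative sketch, not part of this file: a driver typically calls this
 * from its VIDIOC_LOG_STATUS handler so all controls are dumped, e.g.:
 *
 *	v4l2_ctrl_handler_log_status(&state->hdl,
 *				     video_device_node_name(vdev));
 *
 * ("state" and "vdev" are hypothetical driver variables.)
 */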

int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl,
				    const struct v4l2_ctrl_ops *ctrl_ops,
				    const struct v4l2_fwnode_device_properties *p)
{
	if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) {
		u32 orientation_ctrl;

		switch (p->orientation) {
		case V4L2_FWNODE_ORIENTATION_FRONT:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT;
			break;
		case V4L2_FWNODE_ORIENTATION_BACK:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK;
			break;
		case V4L2_FWNODE_ORIENTATION_EXTERNAL:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL;
			break;
		default:
			return -EINVAL;
		}
		if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops,
					    V4L2_CID_CAMERA_ORIENTATION,
					    V4L2_CAMERA_ORIENTATION_EXTERNAL, 0,
					    orientation_ctrl))
			return hdl->error;
	}

	if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) {
		if (!v4l2_ctrl_new_std(hdl, ctrl_ops,
				       V4L2_CID_CAMERA_SENSOR_ROTATION,
				       p->rotation, p->rotation, 1,
				       p->rotation))
			return hdl->error;
	}

	return hdl->error;
}
EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties);