1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * V4L2 controls framework core implementation.
8 #include <linux/export.h>
10 #include <linux/slab.h>
11 #include <media/v4l2-ctrls.h>
12 #include <media/v4l2-event.h>
13 #include <media/v4l2-fwnode.h>
15 #include "v4l2-ctrls-priv.h"
17 static const union v4l2_ctrl_ptr ptr_null;
19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl,
22 memset(ev, 0, sizeof(*ev));
23 ev->type = V4L2_EVENT_CTRL;
25 ev->u.ctrl.changes = changes;
26 ev->u.ctrl.type = ctrl->type;
27 ev->u.ctrl.flags = user_flags(ctrl);
29 ev->u.ctrl.value64 = 0;
31 ev->u.ctrl.value64 = *ctrl->p_cur.p_s64;
32 ev->u.ctrl.minimum = ctrl->minimum;
33 ev->u.ctrl.maximum = ctrl->maximum;
34 if (ctrl->type == V4L2_CTRL_TYPE_MENU ||
35 ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU)
38 ev->u.ctrl.step = ctrl->step;
39 ev->u.ctrl.default_value = ctrl->default_value;
42 void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl)
45 u32 changes = V4L2_EVENT_CTRL_CH_FLAGS;
47 if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY))
48 changes |= V4L2_EVENT_CTRL_CH_VALUE;
49 fill_event(&ev, ctrl, changes);
50 v4l2_event_queue_fh(fh, &ev);
53 void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes)
56 struct v4l2_subscribed_event *sev;
58 if (list_empty(&ctrl->ev_subs))
60 fill_event(&ev, ctrl, changes);
62 list_for_each_entry(sev, &ctrl->ev_subs, node)
64 (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK))
65 v4l2_event_queue_fh(sev->fh, &ev);
68 static bool std_equal(const struct v4l2_ctrl *ctrl, u32 idx,
69 union v4l2_ctrl_ptr ptr1,
70 union v4l2_ctrl_ptr ptr2)
73 case V4L2_CTRL_TYPE_BUTTON:
75 case V4L2_CTRL_TYPE_STRING:
76 idx *= ctrl->elem_size;
77 /* strings are always 0-terminated */
78 return !strcmp(ptr1.p_char + idx, ptr2.p_char + idx);
79 case V4L2_CTRL_TYPE_INTEGER64:
80 return ptr1.p_s64[idx] == ptr2.p_s64[idx];
81 case V4L2_CTRL_TYPE_U8:
82 return ptr1.p_u8[idx] == ptr2.p_u8[idx];
83 case V4L2_CTRL_TYPE_U16:
84 return ptr1.p_u16[idx] == ptr2.p_u16[idx];
85 case V4L2_CTRL_TYPE_U32:
86 return ptr1.p_u32[idx] == ptr2.p_u32[idx];
89 return ptr1.p_s32[idx] == ptr2.p_s32[idx];
90 idx *= ctrl->elem_size;
91 return !memcmp(ptr1.p_const + idx, ptr2.p_const + idx,
96 /* Default intra MPEG-2 quantisation coefficients, from the specification. */
97 static const u8 mpeg2_intra_quant_matrix[64] = {
98 8, 16, 16, 19, 16, 19, 22, 22,
99 22, 22, 22, 22, 26, 24, 26, 27,
100 27, 27, 26, 26, 26, 26, 27, 27,
101 27, 29, 29, 29, 34, 34, 34, 29,
102 29, 29, 27, 27, 29, 29, 32, 32,
103 34, 34, 37, 38, 37, 35, 35, 34,
104 35, 38, 38, 40, 40, 40, 48, 48,
105 46, 46, 56, 56, 58, 69, 69, 83
108 static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx,
109 union v4l2_ctrl_ptr ptr)
111 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
112 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
113 struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant;
114 struct v4l2_ctrl_vp8_frame *p_vp8_frame;
115 struct v4l2_ctrl_vp9_frame *p_vp9_frame;
116 struct v4l2_ctrl_fwht_params *p_fwht_params;
117 struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix;
118 void *p = ptr.p + idx * ctrl->elem_size;
120 if (ctrl->p_def.p_const)
121 memcpy(p, ctrl->p_def.p_const, ctrl->elem_size);
123 memset(p, 0, ctrl->elem_size);
125 switch ((u32)ctrl->type) {
126 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
127 p_mpeg2_sequence = p;
130 p_mpeg2_sequence->chroma_format = 1;
132 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
135 /* interlaced top field */
136 p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
137 p_mpeg2_picture->picture_coding_type =
138 V4L2_MPEG2_PIC_CODING_TYPE_I;
140 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
143 memcpy(p_mpeg2_quant->intra_quantiser_matrix,
144 mpeg2_intra_quant_matrix,
145 ARRAY_SIZE(mpeg2_intra_quant_matrix));
147 * The default non-intra MPEG-2 quantisation
148 * coefficients are all 16, as per the specification.
150 memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16,
151 sizeof(p_mpeg2_quant->non_intra_quantiser_matrix));
153 case V4L2_CTRL_TYPE_VP8_FRAME:
155 p_vp8_frame->num_dct_parts = 1;
157 case V4L2_CTRL_TYPE_VP9_FRAME:
159 p_vp9_frame->profile = 0;
160 p_vp9_frame->bit_depth = 8;
161 p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
162 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING;
164 case V4L2_CTRL_TYPE_FWHT_PARAMS:
166 p_fwht_params->version = V4L2_FWHT_VERSION;
167 p_fwht_params->width = 1280;
168 p_fwht_params->height = 720;
169 p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV |
170 (2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET);
172 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
173 p_h264_scaling_matrix = p;
175 * The default (flat) H.264 scaling matrix when none are
176 * specified in the bitstream; this is according to formulas
177 * (7-8) and (7-9) of the specification.
179 memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix));
184 static void std_init(const struct v4l2_ctrl *ctrl, u32 idx,
185 union v4l2_ctrl_ptr ptr)
187 switch (ctrl->type) {
188 case V4L2_CTRL_TYPE_STRING:
189 idx *= ctrl->elem_size;
190 memset(ptr.p_char + idx, ' ', ctrl->minimum);
191 ptr.p_char[idx + ctrl->minimum] = '\0';
193 case V4L2_CTRL_TYPE_INTEGER64:
194 ptr.p_s64[idx] = ctrl->default_value;
196 case V4L2_CTRL_TYPE_INTEGER:
197 case V4L2_CTRL_TYPE_INTEGER_MENU:
198 case V4L2_CTRL_TYPE_MENU:
199 case V4L2_CTRL_TYPE_BITMASK:
200 case V4L2_CTRL_TYPE_BOOLEAN:
201 ptr.p_s32[idx] = ctrl->default_value;
203 case V4L2_CTRL_TYPE_BUTTON:
204 case V4L2_CTRL_TYPE_CTRL_CLASS:
207 case V4L2_CTRL_TYPE_U8:
208 ptr.p_u8[idx] = ctrl->default_value;
210 case V4L2_CTRL_TYPE_U16:
211 ptr.p_u16[idx] = ctrl->default_value;
213 case V4L2_CTRL_TYPE_U32:
214 ptr.p_u32[idx] = ctrl->default_value;
217 std_init_compound(ctrl, idx, ptr);
222 static void std_log(const struct v4l2_ctrl *ctrl)
224 union v4l2_ctrl_ptr ptr = ctrl->p_cur;
226 if (ctrl->is_array) {
229 for (i = 0; i < ctrl->nr_of_dims; i++)
230 pr_cont("[%u]", ctrl->dims[i]);
234 switch (ctrl->type) {
235 case V4L2_CTRL_TYPE_INTEGER:
236 pr_cont("%d", *ptr.p_s32);
238 case V4L2_CTRL_TYPE_BOOLEAN:
239 pr_cont("%s", *ptr.p_s32 ? "true" : "false");
241 case V4L2_CTRL_TYPE_MENU:
242 pr_cont("%s", ctrl->qmenu[*ptr.p_s32]);
244 case V4L2_CTRL_TYPE_INTEGER_MENU:
245 pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]);
247 case V4L2_CTRL_TYPE_BITMASK:
248 pr_cont("0x%08x", *ptr.p_s32);
250 case V4L2_CTRL_TYPE_INTEGER64:
251 pr_cont("%lld", *ptr.p_s64);
253 case V4L2_CTRL_TYPE_STRING:
254 pr_cont("%s", ptr.p_char);
256 case V4L2_CTRL_TYPE_U8:
257 pr_cont("%u", (unsigned)*ptr.p_u8);
259 case V4L2_CTRL_TYPE_U16:
260 pr_cont("%u", (unsigned)*ptr.p_u16);
262 case V4L2_CTRL_TYPE_U32:
263 pr_cont("%u", (unsigned)*ptr.p_u32);
265 case V4L2_CTRL_TYPE_H264_SPS:
268 case V4L2_CTRL_TYPE_H264_PPS:
271 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
272 pr_cont("H264_SCALING_MATRIX");
274 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
275 pr_cont("H264_SLICE_PARAMS");
277 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
278 pr_cont("H264_DECODE_PARAMS");
280 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
281 pr_cont("H264_PRED_WEIGHTS");
283 case V4L2_CTRL_TYPE_FWHT_PARAMS:
284 pr_cont("FWHT_PARAMS");
286 case V4L2_CTRL_TYPE_VP8_FRAME:
287 pr_cont("VP8_FRAME");
289 case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
290 pr_cont("HDR10_CLL_INFO");
292 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
293 pr_cont("HDR10_MASTERING_DISPLAY");
295 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
296 pr_cont("MPEG2_QUANTISATION");
298 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
299 pr_cont("MPEG2_SEQUENCE");
301 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
302 pr_cont("MPEG2_PICTURE");
304 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
305 pr_cont("VP9_COMPRESSED_HDR");
307 case V4L2_CTRL_TYPE_VP9_FRAME:
308 pr_cont("VP9_FRAME");
310 case V4L2_CTRL_TYPE_HEVC_SPS:
313 case V4L2_CTRL_TYPE_HEVC_PPS:
316 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
317 pr_cont("HEVC_SLICE_PARAMS");
319 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
320 pr_cont("HEVC_SCALING_MATRIX");
322 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
323 pr_cont("HEVC_DECODE_PARAMS");
326 pr_cont("unknown type %d", ctrl->type);
332 * Round towards the closest legal value. Be careful when we are
333 * close to the maximum range of the control type to prevent wrap-arounds.
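/*
 * Worked example: with minimum = 0, maximum = 255 and step = 4, an
 * input value of 6 becomes 6 + 4/2 = 8, which is already a multiple of
 * the step above the minimum, so the result is 8; an input of 254 lies
 * within step/2 of the maximum and snaps straight to 255.
 */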
336 #define ROUND_TO_RANGE(val, offset_type, ctrl) \
338 offset_type offset; \
339 if ((ctrl)->maximum >= 0 && \
340 val >= (ctrl)->maximum - (s32)((ctrl)->step / 2)) \
341 val = (ctrl)->maximum; \
343 val += (s32)((ctrl)->step / 2); \
344 val = clamp_t(typeof(val), val, \
345 (ctrl)->minimum, (ctrl)->maximum); \
346 offset = (val) - (ctrl)->minimum; \
347 offset = (ctrl)->step * (offset / (u32)(ctrl)->step); \
348 val = (ctrl)->minimum + offset; \
352 /* Validate a new control */
354 #define zero_padding(s) \
355 memset(&(s).padding, 0, sizeof((s).padding))
356 #define zero_reserved(s) \
357 memset(&(s).reserved, 0, sizeof((s).reserved))
360 validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf)
364 if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED |
365 V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE))
368 /* Check that all values are in the accepted range. */
369 if (lf->level > GENMASK(5, 0))
372 if (lf->sharpness > GENMASK(2, 0))
375 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++)
376 if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63)
379 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++)
380 if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63)
388 validate_vp9_quant_params(struct v4l2_vp9_quantization *quant)
390 if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 ||
391 quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 ||
392 quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15)
395 zero_reserved(*quant);
400 validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg)
404 if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED |
405 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP |
406 V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE |
407 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA |
408 V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
411 for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) {
412 if (seg->feature_enabled[i] &
413 ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK)
417 for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) {
418 static const int range[] = { 255, 63, 3, 0 };
420 for (j = 0; j < ARRAY_SIZE(seg->feature_data[i]); j++) {
421 if (seg->feature_data[i][j] < -range[j] ||
422 seg->feature_data[i][j] > range[j])
432 validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr)
434 if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT)
441 validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame)
445 /* Make sure we're not passed invalid flags. */
446 if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
447 V4L2_VP9_FRAME_FLAG_SHOW_FRAME |
448 V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT |
449 V4L2_VP9_FRAME_FLAG_INTRA_ONLY |
450 V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV |
451 V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX |
452 V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE |
453 V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
454 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING |
455 V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING))
458 if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT &&
459 frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX)
462 if (frame->profile > V4L2_VP9_PROFILE_MAX)
465 if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL)
468 if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX)
472 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10- and 12-bit depths.
475 if ((frame->profile < 2 && frame->bit_depth != 8) ||
476 (frame->profile >= 2 &&
477 (frame->bit_depth != 10 && frame->bit_depth != 12)))
480 /* Profiles 0 and 2 only accept YUV 4:2:0. */
481 if ((frame->profile == 0 || frame->profile == 2) &&
482 (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) ||
483 !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
486 /* Profiles 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */
487 if ((frame->profile == 1 || frame->profile == 3) &&
488 ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) &&
489 (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
492 if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE)
496 * According to the spec, tile_cols_log2 shall be less than or equal to 6.
499 if (frame->tile_cols_log2 > 6)
502 if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT)
505 ret = validate_vp9_lf_params(&frame->lf);
509 ret = validate_vp9_quant_params(&frame->quant);
513 ret = validate_vp9_seg_params(&frame->seg);
517 zero_reserved(*frame);
522 * Compound controls validation requires setting unused fields/flags to zero
523 * in order to properly detect unchanged controls with std_equal's memcmp.
525 static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx,
526 union v4l2_ctrl_ptr ptr)
528 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
529 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
530 struct v4l2_ctrl_vp8_frame *p_vp8_frame;
531 struct v4l2_ctrl_fwht_params *p_fwht_params;
532 struct v4l2_ctrl_h264_sps *p_h264_sps;
533 struct v4l2_ctrl_h264_pps *p_h264_pps;
534 struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
535 struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
536 struct v4l2_ctrl_h264_decode_params *p_h264_dec_params;
537 struct v4l2_ctrl_hevc_sps *p_hevc_sps;
538 struct v4l2_ctrl_hevc_pps *p_hevc_pps;
539 struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering;
540 struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params;
541 struct v4l2_area *area;
542 void *p = ptr.p + idx * ctrl->elem_size;
545 switch ((u32)ctrl->type) {
546 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
547 p_mpeg2_sequence = p;
549 switch (p_mpeg2_sequence->chroma_format) {
559 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
562 switch (p_mpeg2_picture->intra_dc_precision) {
565 case 2: /* 10 bits */
566 case 3: /* 11 bits */
572 switch (p_mpeg2_picture->picture_structure) {
573 case V4L2_MPEG2_PIC_TOP_FIELD:
574 case V4L2_MPEG2_PIC_BOTTOM_FIELD:
575 case V4L2_MPEG2_PIC_FRAME:
581 switch (p_mpeg2_picture->picture_coding_type) {
582 case V4L2_MPEG2_PIC_CODING_TYPE_I:
583 case V4L2_MPEG2_PIC_CODING_TYPE_P:
584 case V4L2_MPEG2_PIC_CODING_TYPE_B:
589 zero_reserved(*p_mpeg2_picture);
592 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
595 case V4L2_CTRL_TYPE_FWHT_PARAMS:
597 if (p_fwht_params->version < V4L2_FWHT_VERSION)
599 if (!p_fwht_params->width || !p_fwht_params->height)
603 case V4L2_CTRL_TYPE_H264_SPS:
606 /* Some syntax elements are only conditionally valid */
607 if (p_h264_sps->pic_order_cnt_type != 0) {
608 p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
609 } else if (p_h264_sps->pic_order_cnt_type != 1) {
610 p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0;
611 p_h264_sps->offset_for_non_ref_pic = 0;
612 p_h264_sps->offset_for_top_to_bottom_field = 0;
613 memset(&p_h264_sps->offset_for_ref_frame, 0,
614 sizeof(p_h264_sps->offset_for_ref_frame));
617 if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) {
618 p_h264_sps->chroma_format_idc = 1;
619 p_h264_sps->bit_depth_luma_minus8 = 0;
620 p_h264_sps->bit_depth_chroma_minus8 = 0;
623 ~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;
625 if (p_h264_sps->chroma_format_idc < 3)
627 ~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
630 if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY)
632 ~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;
635 * Chroma 4:2:2 format requires at least the High 4:2:2 profile.
637 * The H264 specification and well-known parser implementations
638 * use profile-idc values directly, as that is clearer and
639 * less ambiguous. We do the same here.
641 if (p_h264_sps->profile_idc < 122 &&
642 p_h264_sps->chroma_format_idc > 1)
644 /* Chroma 4:4:4 format requires at least the High 4:4:4 profile */
645 if (p_h264_sps->profile_idc < 244 &&
646 p_h264_sps->chroma_format_idc > 2)
648 if (p_h264_sps->chroma_format_idc > 3)
651 if (p_h264_sps->bit_depth_luma_minus8 > 6)
653 if (p_h264_sps->bit_depth_chroma_minus8 > 6)
655 if (p_h264_sps->log2_max_frame_num_minus4 > 12)
657 if (p_h264_sps->pic_order_cnt_type > 2)
659 if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12)
661 if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN)
665 case V4L2_CTRL_TYPE_H264_PPS:
668 if (p_h264_pps->num_slice_groups_minus1 > 7)
670 if (p_h264_pps->num_ref_idx_l0_default_active_minus1 >
671 (V4L2_H264_REF_LIST_LEN - 1))
673 if (p_h264_pps->num_ref_idx_l1_default_active_minus1 >
674 (V4L2_H264_REF_LIST_LEN - 1))
676 if (p_h264_pps->weighted_bipred_idc > 2)
679 * pic_init_qp_minus26 shall be in the range of
680 * -(26 + QpBdOffset_y) to +25, inclusive,
681 * where QpBdOffset_y is 6 * bit_depth_luma_minus8
683 if (p_h264_pps->pic_init_qp_minus26 < -62 ||
684 p_h264_pps->pic_init_qp_minus26 > 25)
686 if (p_h264_pps->pic_init_qs_minus26 < -26 ||
687 p_h264_pps->pic_init_qs_minus26 > 25)
689 if (p_h264_pps->chroma_qp_index_offset < -12 ||
690 p_h264_pps->chroma_qp_index_offset > 12)
692 if (p_h264_pps->second_chroma_qp_index_offset < -12 ||
693 p_h264_pps->second_chroma_qp_index_offset > 12)
697 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
700 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
701 p_h264_pred_weights = p;
703 if (p_h264_pred_weights->luma_log2_weight_denom > 7)
705 if (p_h264_pred_weights->chroma_log2_weight_denom > 7)
709 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
710 p_h264_slice_params = p;
712 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
713 p_h264_slice_params->flags &=
714 ~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;
716 if (p_h264_slice_params->colour_plane_id > 2)
718 if (p_h264_slice_params->cabac_init_idc > 2)
720 if (p_h264_slice_params->disable_deblocking_filter_idc > 2)
722 if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 ||
723 p_h264_slice_params->slice_alpha_c0_offset_div2 > 6)
725 if (p_h264_slice_params->slice_beta_offset_div2 < -6 ||
726 p_h264_slice_params->slice_beta_offset_div2 > 6)
729 if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I ||
730 p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI)
731 p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0;
732 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
733 p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0;
735 if (p_h264_slice_params->num_ref_idx_l0_active_minus1 >
736 (V4L2_H264_REF_LIST_LEN - 1))
738 if (p_h264_slice_params->num_ref_idx_l1_active_minus1 >
739 (V4L2_H264_REF_LIST_LEN - 1))
741 zero_reserved(*p_h264_slice_params);
744 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
745 p_h264_dec_params = p;
747 if (p_h264_dec_params->nal_ref_idc > 3)
749 for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) {
750 struct v4l2_h264_dpb_entry *dpb_entry =
751 &p_h264_dec_params->dpb[i];
753 zero_reserved(*dpb_entry);
755 zero_reserved(*p_h264_dec_params);
758 case V4L2_CTRL_TYPE_VP8_FRAME:
761 switch (p_vp8_frame->num_dct_parts) {
770 zero_padding(p_vp8_frame->segment);
771 zero_padding(p_vp8_frame->lf);
772 zero_padding(p_vp8_frame->quant);
773 zero_padding(p_vp8_frame->entropy);
774 zero_padding(p_vp8_frame->coder_state);
777 case V4L2_CTRL_TYPE_HEVC_SPS:
780 if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) {
781 p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0;
782 p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0;
783 p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0;
784 p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0;
787 if (!(p_hevc_sps->flags &
788 V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT))
789 p_hevc_sps->num_long_term_ref_pics_sps = 0;
792 case V4L2_CTRL_TYPE_HEVC_PPS:
795 if (!(p_hevc_pps->flags &
796 V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED))
797 p_hevc_pps->diff_cu_qp_delta_depth = 0;
799 if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) {
800 p_hevc_pps->num_tile_columns_minus1 = 0;
801 p_hevc_pps->num_tile_rows_minus1 = 0;
802 memset(&p_hevc_pps->column_width_minus1, 0,
803 sizeof(p_hevc_pps->column_width_minus1));
804 memset(&p_hevc_pps->row_height_minus1, 0,
805 sizeof(p_hevc_pps->row_height_minus1));
808 ~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
811 if (p_hevc_pps->flags &
812 V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) {
813 p_hevc_pps->pps_beta_offset_div2 = 0;
814 p_hevc_pps->pps_tc_offset_div2 = 0;
818 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
819 p_hevc_decode_params = p;
821 if (p_hevc_decode_params->num_active_dpb_entries >
822 V4L2_HEVC_DPB_ENTRIES_NUM_MAX)
826 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
829 case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
832 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
833 p_hdr10_mastering = p;
835 for (i = 0; i < 3; ++i) {
836 if (p_hdr10_mastering->display_primaries_x[i] <
837 V4L2_HDR10_MASTERING_PRIMARIES_X_LOW ||
838 p_hdr10_mastering->display_primaries_x[i] >
839 V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH ||
840 p_hdr10_mastering->display_primaries_y[i] <
841 V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW ||
842 p_hdr10_mastering->display_primaries_y[i] >
843 V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH)
847 if (p_hdr10_mastering->white_point_x <
848 V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW ||
849 p_hdr10_mastering->white_point_x >
850 V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH ||
851 p_hdr10_mastering->white_point_y <
852 V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW ||
853 p_hdr10_mastering->white_point_y >
854 V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH)
857 if (p_hdr10_mastering->max_display_mastering_luminance <
858 V4L2_HDR10_MASTERING_MAX_LUMA_LOW ||
859 p_hdr10_mastering->max_display_mastering_luminance >
860 V4L2_HDR10_MASTERING_MAX_LUMA_HIGH ||
861 p_hdr10_mastering->min_display_mastering_luminance <
862 V4L2_HDR10_MASTERING_MIN_LUMA_LOW ||
863 p_hdr10_mastering->min_display_mastering_luminance >
864 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
867 /* The following restriction comes from ITU-T Rec. H.265 spec */
868 if (p_hdr10_mastering->max_display_mastering_luminance ==
869 V4L2_HDR10_MASTERING_MAX_LUMA_LOW &&
870 p_hdr10_mastering->min_display_mastering_luminance ==
871 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
876 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
879 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
880 return validate_vp9_compressed_hdr(p);
882 case V4L2_CTRL_TYPE_VP9_FRAME:
883 return validate_vp9_frame(p);
885 case V4L2_CTRL_TYPE_AREA:
887 if (!area->width || !area->height)
898 static int std_validate(const struct v4l2_ctrl *ctrl, u32 idx,
899 union v4l2_ctrl_ptr ptr)
905 switch ((u32)ctrl->type) {
906 case V4L2_CTRL_TYPE_INTEGER:
907 return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl);
908 case V4L2_CTRL_TYPE_INTEGER64:
910 * We can't use the ROUND_TO_RANGE define here due to
911 * the u64 divide that needs special care.
913 val = ptr.p_s64[idx];
914 if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2))
917 val += (s64)(ctrl->step / 2);
918 val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum);
919 offset = val - ctrl->minimum;
920 do_div(offset, ctrl->step);
921 ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step;
923 case V4L2_CTRL_TYPE_U8:
924 return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl);
925 case V4L2_CTRL_TYPE_U16:
926 return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl);
927 case V4L2_CTRL_TYPE_U32:
928 return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl);
930 case V4L2_CTRL_TYPE_BOOLEAN:
931 ptr.p_s32[idx] = !!ptr.p_s32[idx];
934 case V4L2_CTRL_TYPE_MENU:
935 case V4L2_CTRL_TYPE_INTEGER_MENU:
936 if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum)
938 if (ptr.p_s32[idx] < BITS_PER_LONG_LONG &&
939 (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx])))
941 if (ctrl->type == V4L2_CTRL_TYPE_MENU &&
942 ctrl->qmenu[ptr.p_s32[idx]][0] == '\0')
946 case V4L2_CTRL_TYPE_BITMASK:
947 ptr.p_s32[idx] &= ctrl->maximum;
950 case V4L2_CTRL_TYPE_BUTTON:
951 case V4L2_CTRL_TYPE_CTRL_CLASS:
955 case V4L2_CTRL_TYPE_STRING:
956 idx *= ctrl->elem_size;
957 len = strlen(ptr.p_char + idx);
958 if (len < ctrl->minimum)
960 if ((len - (u32)ctrl->minimum) % (u32)ctrl->step)
965 return std_validate_compound(ctrl, idx, ptr);
969 static const struct v4l2_ctrl_type_ops std_type_ops = {
973 .validate = std_validate,
976 void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv)
981 ctrl->call_notify = 0;
984 if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify))
986 ctrl->handler->notify = notify;
987 ctrl->handler->notify_priv = priv;
988 ctrl->call_notify = 1;
990 EXPORT_SYMBOL(v4l2_ctrl_notify);
992 /* Copy one value to another. */
993 static void ptr_to_ptr(struct v4l2_ctrl *ctrl,
994 union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to,
999 memcpy(to.p, from.p_const, elems * ctrl->elem_size);
1002 /* Copy the new value to the current value. */
1003 void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags)
1010 /* has_changed is set by cluster_changed */
1011 changed = ctrl->has_changed;
1013 if (ctrl->is_dyn_array)
1014 ctrl->elems = ctrl->new_elems;
1015 ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur, ctrl->elems);
1018 if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) {
1019 /* Note: CH_FLAGS is only set for auto clusters. */
1021 ~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE);
1022 if (!is_cur_manual(ctrl->cluster[0])) {
1023 ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE;
1024 if (ctrl->cluster[0]->has_volatiles)
1025 ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;
1029 if (changed || ch_flags) {
1030 /* If a control was changed that was not one of the controls
1031 modified by the application, then send the event to all. */
1034 send_event(fh, ctrl,
1035 (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags);
1036 if (ctrl->call_notify && changed && ctrl->handler->notify)
1037 ctrl->handler->notify(ctrl, ctrl->handler->notify_priv);
1041 /* Copy the current value to the new value */
1042 void cur_to_new(struct v4l2_ctrl *ctrl)
1046 if (ctrl->is_dyn_array)
1047 ctrl->new_elems = ctrl->elems;
1048 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
1051 static bool req_alloc_dyn_array(struct v4l2_ctrl_ref *ref, u32 elems)
1055 if (elems < ref->p_req_dyn_alloc_elems)
1058 tmp = kvmalloc(elems * ref->ctrl->elem_size, GFP_KERNEL);
1061 ref->p_req_dyn_enomem = true;
1064 ref->p_req_dyn_enomem = false;
1065 kvfree(ref->p_req.p);
1067 ref->p_req_dyn_alloc_elems = elems;
1071 /* Copy the new value to the request value */
1072 void new_to_req(struct v4l2_ctrl_ref *ref)
1074 struct v4l2_ctrl *ctrl;
1080 if (ctrl->is_dyn_array && !req_alloc_dyn_array(ref, ctrl->new_elems))
1083 ref->p_req_elems = ctrl->new_elems;
1084 ptr_to_ptr(ctrl, ctrl->p_new, ref->p_req, ref->p_req_elems);
1085 ref->p_req_valid = true;
1088 /* Copy the current value to the request value */
1089 void cur_to_req(struct v4l2_ctrl_ref *ref)
1091 struct v4l2_ctrl *ctrl;
1097 if (ctrl->is_dyn_array && !req_alloc_dyn_array(ref, ctrl->elems))
1100 ref->p_req_elems = ctrl->elems;
1101 ptr_to_ptr(ctrl, ctrl->p_cur, ref->p_req, ctrl->elems);
1102 ref->p_req_valid = true;
1105 /* Copy the request value to the new value */
1106 int req_to_new(struct v4l2_ctrl_ref *ref)
1108 struct v4l2_ctrl *ctrl;
1116 * This control was never set in the request, so just use the current value.
1119 if (!ref->p_req_valid) {
1120 if (ctrl->is_dyn_array)
1121 ctrl->new_elems = ctrl->elems;
1122 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
1126 /* Not a dynamic array, so just copy the request value */
1127 if (!ctrl->is_dyn_array) {
1128 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems);
1132 /* Sanity check, should never happen */
1133 if (WARN_ON(!ref->p_req_dyn_alloc_elems))
1137 * Check if the number of elements in the request is more than the
1138 * elements in ctrl->p_dyn. If so, attempt to realloc ctrl->p_dyn.
1139 * Note that p_dyn is allocated with twice the number of elements
1140 * in the dynamic array since it has to store both the current and
1141 * new value of such a control.
1143 if (ref->p_req_elems > ctrl->p_dyn_alloc_elems) {
1144 unsigned int sz = ref->p_req_elems * ctrl->elem_size;
1145 void *old = ctrl->p_dyn;
1146 void *tmp = kvzalloc(2 * sz, GFP_KERNEL);
1150 memcpy(tmp, ctrl->p_new.p, ctrl->elems * ctrl->elem_size);
1151 memcpy(tmp + sz, ctrl->p_cur.p, ctrl->elems * ctrl->elem_size);
1152 ctrl->p_new.p = tmp;
1153 ctrl->p_cur.p = tmp + sz;
1155 ctrl->p_dyn_alloc_elems = ref->p_req_elems;
1159 ctrl->new_elems = ref->p_req_elems;
1160 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems);
1164 /* Control range checking */
1165 int check_range(enum v4l2_ctrl_type type,
1166 s64 min, s64 max, u64 step, s64 def)
1169 case V4L2_CTRL_TYPE_BOOLEAN:
1170 if (step != 1 || max > 1 || min < 0)
1173 case V4L2_CTRL_TYPE_U8:
1174 case V4L2_CTRL_TYPE_U16:
1175 case V4L2_CTRL_TYPE_U32:
1176 case V4L2_CTRL_TYPE_INTEGER:
1177 case V4L2_CTRL_TYPE_INTEGER64:
1178 if (step == 0 || min > max || def < min || def > max)
1181 case V4L2_CTRL_TYPE_BITMASK:
1182 if (step || min || !max || (def & ~max))
1185 case V4L2_CTRL_TYPE_MENU:
1186 case V4L2_CTRL_TYPE_INTEGER_MENU:
1187 if (min > max || def < min || def > max)
1189 /* Note: step == menu_skip_mask for menu controls.
1190 So here we check if the default value is masked out. */
1191 if (step && (BIT_ULL(def) & step))
1194 case V4L2_CTRL_TYPE_STRING:
1195 if (min > max || min < 0 || step < 1 || def)
1203 /* Set the handler's error code if it wasn't set earlier already */
1204 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err)
1206 if (hdl->error == 0)
1211 /* Initialize the handler */
1212 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl,
1213 unsigned nr_of_controls_hint,
1214 struct lock_class_key *key, const char *name)
1216 mutex_init(&hdl->_lock);
1217 hdl->lock = &hdl->_lock;
1218 lockdep_set_class_and_name(hdl->lock, key, name);
1219 INIT_LIST_HEAD(&hdl->ctrls);
1220 INIT_LIST_HEAD(&hdl->ctrl_refs);
1221 hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8;
1222 hdl->buckets = kvcalloc(hdl->nr_of_buckets, sizeof(hdl->buckets[0]),
1224 hdl->error = hdl->buckets ? 0 : -ENOMEM;
1225 v4l2_ctrl_handler_init_request(hdl);
1228 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class);
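/*
 * Minimal usage sketch for a driver (foo_ctrl_ops is hypothetical);
 * drivers normally use the v4l2_ctrl_handler_init() wrapper macro
 * rather than calling this function directly:
 *
 *	struct v4l2_ctrl_handler hdl;
 *
 *	v4l2_ctrl_handler_init(&hdl, 2);
 *	v4l2_ctrl_new_std(&hdl, &foo_ctrl_ops, V4L2_CID_BRIGHTNESS,
 *			  0, 255, 1, 128);
 *	v4l2_ctrl_new_std(&hdl, &foo_ctrl_ops, V4L2_CID_CONTRAST,
 *			  0, 255, 1, 16);
 *	if (hdl.error) {
 *		int err = hdl.error;
 *
 *		v4l2_ctrl_handler_free(&hdl);
 *		return err;
 *	}
 */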
1230 /* Free all controls and control refs */
1231 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl)
1233 struct v4l2_ctrl_ref *ref, *next_ref;
1234 struct v4l2_ctrl *ctrl, *next_ctrl;
1235 struct v4l2_subscribed_event *sev, *next_sev;
1237 if (hdl == NULL || hdl->buckets == NULL)
1240 v4l2_ctrl_handler_free_request(hdl);
1242 mutex_lock(hdl->lock);
1243 /* Free all nodes */
1244 list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) {
1245 list_del(&ref->node);
1246 if (ref->p_req_dyn_alloc_elems)
1247 kvfree(ref->p_req.p);
1250 /* Free all controls owned by the handler */
1251 list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) {
1252 list_del(&ctrl->node);
1253 list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node)
1254 list_del(&sev->node);
1255 kvfree(ctrl->p_dyn);
1258 kvfree(hdl->buckets);
1259 hdl->buckets = NULL;
1262 mutex_unlock(hdl->lock);
1263 mutex_destroy(&hdl->_lock);
1265 EXPORT_SYMBOL(v4l2_ctrl_handler_free);
1267 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer
1268 be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing
1269 with applications that do not use the NEXT_CTRL flag.
1271 We just find the n-th private user control. It's O(N), but that should not
1272 be an issue in this particular case. */
1273 static struct v4l2_ctrl_ref *find_private_ref(
1274 struct v4l2_ctrl_handler *hdl, u32 id)
1276 struct v4l2_ctrl_ref *ref;
1278 id -= V4L2_CID_PRIVATE_BASE;
1279 list_for_each_entry(ref, &hdl->ctrl_refs, node) {
1280 /* Search for private user controls that are compatible with VIDIOC_G/S_CTRL. */
1282 if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER &&
1283 V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) {
1284 if (!ref->ctrl->is_int)
1294 /* Find a control with the given ID. */
1295 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id)
1297 struct v4l2_ctrl_ref *ref;
1300 id &= V4L2_CTRL_ID_MASK;
1302 /* Old-style private controls need special handling */
1303 if (id >= V4L2_CID_PRIVATE_BASE)
1304 return find_private_ref(hdl, id);
1305 bucket = id % hdl->nr_of_buckets;
1307 /* Simple optimization: cache the last control found */
1308 if (hdl->cached && hdl->cached->ctrl->id == id)
1311 /* Not in cache, search the hash */
1312 ref = hdl->buckets ? hdl->buckets[bucket] : NULL;
1313 while (ref && ref->ctrl->id != id)
1317 hdl->cached = ref; /* cache it! */
1321 /* Find a control with the given ID. Take the handler's lock first. */
1322 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id)
1324 struct v4l2_ctrl_ref *ref = NULL;
1327 mutex_lock(hdl->lock);
1328 ref = find_ref(hdl, id);
1329 mutex_unlock(hdl->lock);
1334 /* Find a control with the given ID. */
1335 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id)
1337 struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id);
1339 return ref ? ref->ctrl : NULL;
1341 EXPORT_SYMBOL(v4l2_ctrl_find);
1343 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. */
1344 int handler_new_ref(struct v4l2_ctrl_handler *hdl,
1345 struct v4l2_ctrl *ctrl,
1346 struct v4l2_ctrl_ref **ctrl_ref,
1347 bool from_other_dev, bool allocate_req)
1349 struct v4l2_ctrl_ref *ref;
1350 struct v4l2_ctrl_ref *new_ref;
1352 u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1;
1353 int bucket = id % hdl->nr_of_buckets; /* which bucket to use */
1354 unsigned int size_extra_req = 0;
1360 * Automatically add the control class if it is not yet present and
1361 * the new control is not a compound control.
1363 if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES &&
1364 id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL)
1365 if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0))
1371 if (allocate_req && !ctrl->is_dyn_array)
1372 size_extra_req = ctrl->elems * ctrl->elem_size;
1373 new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL);
1375 return handler_set_err(hdl, -ENOMEM);
1376 new_ref->ctrl = ctrl;
1377 new_ref->from_other_dev = from_other_dev;
1379 new_ref->p_req.p = &new_ref[1];
1381 INIT_LIST_HEAD(&new_ref->node);
1383 mutex_lock(hdl->lock);
1385 /* Add immediately at the end of the list if the list is empty, or if
1386 the last element in the list has a lower ID.
1387 This ensures that when elements are added in ascending order the
1388 insertion is an O(1) operation. */
1389 if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) {
1390 list_add_tail(&new_ref->node, &hdl->ctrl_refs);
1391 goto insert_in_hash;
1394 /* Find insert position in sorted list */
1395 list_for_each_entry(ref, &hdl->ctrl_refs, node) {
1396 if (ref->ctrl->id < id)
1398 /* Don't add duplicates */
1399 if (ref->ctrl->id == id) {
1403 list_add(&new_ref->node, ref->node.prev);
1408 /* Insert the control node in the hash */
1409 new_ref->next = hdl->buckets[bucket];
1410 hdl->buckets[bucket] = new_ref;
1412 *ctrl_ref = new_ref;
1413 if (ctrl->handler == hdl) {
1414 /* By default each control starts in a cluster of its own.
1415 * new_ref->ctrl is basically a cluster array with one
1416 * element, so that's perfect to use as the cluster pointer.
1417 * But only do this for the handler that owns the control.
1419 ctrl->cluster = &new_ref->ctrl;
1420 ctrl->ncontrols = 1;
1424 mutex_unlock(hdl->lock);
1428 /* Add a new control */
1429 static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl,
1430 const struct v4l2_ctrl_ops *ops,
1431 const struct v4l2_ctrl_type_ops *type_ops,
1432 u32 id, const char *name, enum v4l2_ctrl_type type,
1433 s64 min, s64 max, u64 step, s64 def,
1434 const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size,
1435 u32 flags, const char * const *qmenu,
1436 const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def,
1439 struct v4l2_ctrl *ctrl;
1441 unsigned nr_of_dims = 0;
1444 unsigned tot_ctrl_size;
1452 while (dims && dims[nr_of_dims]) {
1453 elems *= dims[nr_of_dims];
1455 if (nr_of_dims == V4L2_CTRL_MAX_DIMS)
1458 is_array = nr_of_dims > 0;
1460 /* Prefill elem_size for all types handled by std_type_ops */
1461 switch ((u32)type) {
1462 case V4L2_CTRL_TYPE_INTEGER64:
1463 elem_size = sizeof(s64);
1465 case V4L2_CTRL_TYPE_STRING:
1466 elem_size = max + 1;
1468 case V4L2_CTRL_TYPE_U8:
1469 elem_size = sizeof(u8);
1471 case V4L2_CTRL_TYPE_U16:
1472 elem_size = sizeof(u16);
1474 case V4L2_CTRL_TYPE_U32:
1475 elem_size = sizeof(u32);
1477 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
1478 elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence);
1480 case V4L2_CTRL_TYPE_MPEG2_PICTURE:
1481 elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture);
1483 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
1484 elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation);
1486 case V4L2_CTRL_TYPE_FWHT_PARAMS:
1487 elem_size = sizeof(struct v4l2_ctrl_fwht_params);
1489 case V4L2_CTRL_TYPE_H264_SPS:
1490 elem_size = sizeof(struct v4l2_ctrl_h264_sps);
1492 case V4L2_CTRL_TYPE_H264_PPS:
1493 elem_size = sizeof(struct v4l2_ctrl_h264_pps);
1495 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
1496 elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix);
1498 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
1499 elem_size = sizeof(struct v4l2_ctrl_h264_slice_params);
1501 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
1502 elem_size = sizeof(struct v4l2_ctrl_h264_decode_params);
1504 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
1505 elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights);
1507 case V4L2_CTRL_TYPE_VP8_FRAME:
1508 elem_size = sizeof(struct v4l2_ctrl_vp8_frame);
1510 case V4L2_CTRL_TYPE_HEVC_SPS:
1511 elem_size = sizeof(struct v4l2_ctrl_hevc_sps);
1513 case V4L2_CTRL_TYPE_HEVC_PPS:
1514 elem_size = sizeof(struct v4l2_ctrl_hevc_pps);
1516 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
1517 elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params);
1519 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
1520 elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix);
1522 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
1523 elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params);
1525 case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
1526 elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info);
1528 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
1529 elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display);
1531 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
1532 elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr);
1534 case V4L2_CTRL_TYPE_VP9_FRAME:
1535 elem_size = sizeof(struct v4l2_ctrl_vp9_frame);
1537 case V4L2_CTRL_TYPE_AREA:
1538 elem_size = sizeof(struct v4l2_area);
1541 if (type < V4L2_CTRL_COMPOUND_TYPES)
1542 elem_size = sizeof(s32);
1547 if (id == 0 || name == NULL || !elem_size ||
1548 id >= V4L2_CID_PRIVATE_BASE ||
1549 (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) ||
1550 (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) {
1551 handler_set_err(hdl, -ERANGE);
1554 err = check_range(type, min, max, step, def);
1556 handler_set_err(hdl, err);
1560 (type == V4L2_CTRL_TYPE_BUTTON ||
1561 type == V4L2_CTRL_TYPE_CTRL_CLASS)) {
1562 handler_set_err(hdl, -EINVAL);
1565 if (flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) {
1567 * For now this is only supported for one-dimensional arrays.
1569 * This can be relaxed in the future, but this will
1570 * require more effort.
1572 if (nr_of_dims != 1) {
1573 handler_set_err(hdl, -EINVAL);
1576 /* Start with just 1 element */
1580 tot_ctrl_size = elem_size * elems;
1582 if (type == V4L2_CTRL_TYPE_BUTTON)
1583 flags |= V4L2_CTRL_FLAG_WRITE_ONLY |
1584 V4L2_CTRL_FLAG_EXECUTE_ON_WRITE;
1585 else if (type == V4L2_CTRL_TYPE_CTRL_CLASS)
1586 flags |= V4L2_CTRL_FLAG_READ_ONLY;
1587 else if (!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) &&
1588 (type == V4L2_CTRL_TYPE_INTEGER64 ||
1589 type == V4L2_CTRL_TYPE_STRING ||
1590 type >= V4L2_CTRL_COMPOUND_TYPES ||
1592 sz_extra += 2 * tot_ctrl_size;
1594 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const)
1595 sz_extra += elem_size;
1597 ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL);
1599 handler_set_err(hdl, -ENOMEM);
1603 INIT_LIST_HEAD(&ctrl->node);
1604 INIT_LIST_HEAD(&ctrl->ev_subs);
1605 ctrl->handler = hdl;
1607 ctrl->type_ops = type_ops ? type_ops : &std_type_ops;
1611 ctrl->flags = flags;
1612 ctrl->minimum = min;
1613 ctrl->maximum = max;
1615 ctrl->default_value = def;
1616 ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING;
1617 ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string;
1618 ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64;
1619 ctrl->is_array = is_array;
1620 ctrl->is_dyn_array = !!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY);
1621 ctrl->elems = elems;
1622 ctrl->new_elems = elems;
1623 ctrl->nr_of_dims = nr_of_dims;
1625 memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0]));
1626 ctrl->elem_size = elem_size;
1627 if (type == V4L2_CTRL_TYPE_MENU)
1628 ctrl->qmenu = qmenu;
1629 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
1630 ctrl->qmenu_int = qmenu_int;
1632 ctrl->cur.val = ctrl->val = def;
1635 if (ctrl->is_dyn_array) {
1636 ctrl->p_dyn_alloc_elems = elems;
1637 ctrl->p_dyn = kvzalloc(2 * elems * elem_size, GFP_KERNEL);
1645 if (!ctrl->is_int) {
1646 ctrl->p_new.p = data;
1647 ctrl->p_cur.p = data + tot_ctrl_size;
1649 ctrl->p_new.p = &ctrl->val;
1650 ctrl->p_cur.p = &ctrl->cur.val;
1653 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) {
1654 if (ctrl->is_dyn_array)
1655 ctrl->p_def.p = &ctrl[1];
1657 ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size;
1658 memcpy(ctrl->p_def.p, p_def.p_const, elem_size);
1661 for (idx = 0; idx < elems; idx++) {
1662 ctrl->type_ops->init(ctrl, idx, ctrl->p_cur);
1663 ctrl->type_ops->init(ctrl, idx, ctrl->p_new);
1666 if (handler_new_ref(hdl, ctrl, NULL, false, false)) {
1667 kvfree(ctrl->p_dyn);
1671 mutex_lock(hdl->lock);
1672 list_add_tail(&ctrl->node, &hdl->ctrls);
1673 mutex_unlock(hdl->lock);
1677 struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl,
1678 const struct v4l2_ctrl_config *cfg, void *priv)
1681 struct v4l2_ctrl *ctrl;
1682 const char *name = cfg->name;
1683 const char * const *qmenu = cfg->qmenu;
1684 const s64 *qmenu_int = cfg->qmenu_int;
1685 enum v4l2_ctrl_type type = cfg->type;
1686 u32 flags = cfg->flags;
1689 u64 step = cfg->step;
1693 v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step,
1696 is_menu = (type == V4L2_CTRL_TYPE_MENU ||
1697 type == V4L2_CTRL_TYPE_INTEGER_MENU);
1701 WARN_ON(cfg->menu_skip_mask);
1702 if (type == V4L2_CTRL_TYPE_MENU && !qmenu) {
1703 qmenu = v4l2_ctrl_get_menu(cfg->id);
1704 } else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) {
1705 handler_set_err(hdl, -EINVAL);
1709 ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name,
1711 is_menu ? cfg->menu_skip_mask : step, def,
1712 cfg->dims, cfg->elem_size,
1713 flags, qmenu, qmenu_int, cfg->p_def, priv);
1715 ctrl->is_private = cfg->is_private;
1718 EXPORT_SYMBOL(v4l2_ctrl_new_custom);
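/*
 * Sketch of a driver-private control described by a v4l2_ctrl_config
 * (the id, name and ops below are hypothetical):
 *
 *	static const struct v4l2_ctrl_config foo_filter_cfg = {
 *		.ops = &foo_ctrl_ops,
 *		.id = V4L2_CID_USER_FOO_BASE + 0,
 *		.name = "Foo Filter Strength",
 *		.type = V4L2_CTRL_TYPE_INTEGER,
 *		.min = 0,
 *		.max = 15,
 *		.step = 1,
 *		.def = 0,
 *	};
 *
 *	ctrl = v4l2_ctrl_new_custom(&foo->ctrl_handler, &foo_filter_cfg, NULL);
 */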
1720 /* Helper function for standard non-menu controls */
1721 struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl,
1722 const struct v4l2_ctrl_ops *ops,
1723 u32 id, s64 min, s64 max, u64 step, s64 def)
1726 enum v4l2_ctrl_type type;
1729 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1730 if (type == V4L2_CTRL_TYPE_MENU ||
1731 type == V4L2_CTRL_TYPE_INTEGER_MENU ||
1732 type >= V4L2_CTRL_COMPOUND_TYPES) {
1733 handler_set_err(hdl, -EINVAL);
1736 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1737 min, max, step, def, NULL, 0,
1738 flags, NULL, NULL, ptr_null, NULL);
1740 EXPORT_SYMBOL(v4l2_ctrl_new_std);
1742 /* Helper function for standard menu controls */
1743 struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl,
1744 const struct v4l2_ctrl_ops *ops,
1745 u32 id, u8 _max, u64 mask, u8 _def)
1747 const char * const *qmenu = NULL;
1748 const s64 *qmenu_int = NULL;
1749 unsigned int qmenu_int_len = 0;
1751 enum v4l2_ctrl_type type;
1758 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1760 if (type == V4L2_CTRL_TYPE_MENU)
1761 qmenu = v4l2_ctrl_get_menu(id);
1762 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
1763 qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len);
1765 if ((!qmenu && !qmenu_int) || (qmenu_int && max > qmenu_int_len)) {
1766 handler_set_err(hdl, -EINVAL);
1769 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1770 0, max, mask, def, NULL, 0,
1771 flags, qmenu, qmenu_int, ptr_null, NULL);
1773 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu);
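/*
 * Typical example, matching the control framework documentation (the
 * foo names are hypothetical); _60HZ is the highest menu value this
 * hypothetical driver supports and the mask of 0 means no menu items
 * are skipped:
 *
 *	v4l2_ctrl_new_std_menu(&foo->ctrl_handler, &foo_ctrl_ops,
 *			       V4L2_CID_POWER_LINE_FREQUENCY,
 *			       V4L2_CID_POWER_LINE_FREQUENCY_60HZ, 0,
 *			       V4L2_CID_POWER_LINE_FREQUENCY_DISABLED);
 */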
1775 /* Helper function for standard menu controls with driver defined menu */
1776 struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl,
1777 const struct v4l2_ctrl_ops *ops, u32 id, u8 _max,
1778 u64 mask, u8 _def, const char * const *qmenu)
1780 enum v4l2_ctrl_type type;
1788 /* v4l2_ctrl_new_std_menu_items() should only be called for
1789 * standard controls without a standard menu.
1791 if (v4l2_ctrl_get_menu(id)) {
1792 handler_set_err(hdl, -EINVAL);
1796 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1797 if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) {
1798 handler_set_err(hdl, -EINVAL);
1801 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1802 0, max, mask, def, NULL, 0,
1803 flags, qmenu, NULL, ptr_null, NULL);
1806 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items);
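/*
 * Typical use is a sensor driver exposing a driver-defined test
 * pattern menu (the foo names are hypothetical):
 *
 *	static const char * const foo_test_pattern_menu[] = {
 *		"Disabled",
 *		"Vertical Color Bars",
 *	};
 *
 *	v4l2_ctrl_new_std_menu_items(hdl, &foo_ctrl_ops,
 *				     V4L2_CID_TEST_PATTERN,
 *				     ARRAY_SIZE(foo_test_pattern_menu) - 1,
 *				     0, 0, foo_test_pattern_menu);
 */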
1808 /* Helper function for standard compound controls */
1809 struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl,
1810 const struct v4l2_ctrl_ops *ops, u32 id,
1811 const union v4l2_ctrl_ptr p_def)
1814 enum v4l2_ctrl_type type;
1816 s64 min, max, step, def;
1818 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1819 if (type < V4L2_CTRL_COMPOUND_TYPES) {
1820 handler_set_err(hdl, -EINVAL);
1823 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1824 min, max, step, def, NULL, 0,
1825 flags, NULL, NULL, p_def, NULL);
1827 EXPORT_SYMBOL(v4l2_ctrl_new_std_compound);
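/*
 * Sketch of how a stateless codec driver might register a compound
 * control with a driver-supplied default (foo_fwht_default and
 * foo_ctrl_ops are hypothetical):
 *
 *	static const struct v4l2_ctrl_fwht_params foo_fwht_default = {
 *		.version = V4L2_FWHT_VERSION,
 *		.width = 1280,
 *		.height = 720,
 *	};
 *
 *	v4l2_ctrl_new_std_compound(hdl, &foo_ctrl_ops,
 *				   V4L2_CID_STATELESS_FWHT_PARAMS,
 *				   v4l2_ctrl_ptr_create((void *)&foo_fwht_default));
 */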
1829 /* Helper function for standard integer menu controls */
1830 struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl,
1831 const struct v4l2_ctrl_ops *ops,
1832 u32 id, u8 _max, u8 _def, const s64 *qmenu_int)
1835 enum v4l2_ctrl_type type;
1842 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
1843 if (type != V4L2_CTRL_TYPE_INTEGER_MENU) {
1844 handler_set_err(hdl, -EINVAL);
1847 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
1848 0, max, 0, def, NULL, 0,
1849 flags, NULL, qmenu_int, ptr_null, NULL);
1851 EXPORT_SYMBOL(v4l2_ctrl_new_int_menu);
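/*
 * Typical use is a sensor driver exposing its supported link
 * frequencies (foo_link_freqs and foo_ctrl_ops are hypothetical):
 *
 *	static const s64 foo_link_freqs[] = { 297000000, 148500000 };
 *
 *	v4l2_ctrl_new_int_menu(hdl, &foo_ctrl_ops, V4L2_CID_LINK_FREQ,
 *			       ARRAY_SIZE(foo_link_freqs) - 1, 0,
 *			       foo_link_freqs);
 */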
1853 /* Add the controls from another handler to our own. */
1854 int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl,
1855 struct v4l2_ctrl_handler *add,
1856 bool (*filter)(const struct v4l2_ctrl *ctrl),
1857 bool from_other_dev)
1859 struct v4l2_ctrl_ref *ref;
1862 /* Do nothing if either handler is NULL or if they are the same */
1863 if (!hdl || !add || hdl == add)
1867 mutex_lock(add->lock);
1868 list_for_each_entry(ref, &add->ctrl_refs, node) {
1869 struct v4l2_ctrl *ctrl = ref->ctrl;
1871 /* Skip handler-private controls. */
1872 if (ctrl->is_private)
1874 /* And control classes */
1875 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
1877 /* Filter any unwanted controls */
1878 if (filter && !filter(ctrl))
1880 ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false);
1884 mutex_unlock(add->lock);
1887 EXPORT_SYMBOL(v4l2_ctrl_add_handler);
1889 bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl)
1891 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX)
1893 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX)
1896 case V4L2_CID_AUDIO_MUTE:
1897 case V4L2_CID_AUDIO_VOLUME:
1898 case V4L2_CID_AUDIO_BALANCE:
1899 case V4L2_CID_AUDIO_BASS:
1900 case V4L2_CID_AUDIO_TREBLE:
1901 case V4L2_CID_AUDIO_LOUDNESS:
1908 EXPORT_SYMBOL(v4l2_ctrl_radio_filter);
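/*
 * A bridge driver that inherits controls from a tuner sub-device would
 * typically combine the two handlers like this (the handler names are
 * hypothetical):
 *
 *	v4l2_ctrl_add_handler(&radio->ctrl_handler, &tuner_sd->ctrl_handler,
 *			      v4l2_ctrl_radio_filter, true);
 */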
1910 /* Cluster controls */
1911 void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls)
1913 bool has_volatiles = false;
1916 /* The first control is the master control and it must not be NULL */
1917 if (WARN_ON(ncontrols == 0 || controls[0] == NULL))
1920 for (i = 0; i < ncontrols; i++) {
1922 controls[i]->cluster = controls;
1923 controls[i]->ncontrols = ncontrols;
1924 if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE)
1925 has_volatiles = true;
1928 controls[0]->has_volatiles = has_volatiles;
1930 EXPORT_SYMBOL(v4l2_ctrl_cluster);
1932 void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls,
1933 u8 manual_val, bool set_volatile)
1935 struct v4l2_ctrl *master = controls[0];
1939 v4l2_ctrl_cluster(ncontrols, controls);
1940 WARN_ON(ncontrols <= 1);
1941 WARN_ON(manual_val < master->minimum || manual_val > master->maximum);
1942 WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl));
1943 master->is_auto = true;
1944 master->has_volatiles = set_volatile;
1945 master->manual_mode_value = manual_val;
1946 master->flags |= V4L2_CTRL_FLAG_UPDATE;
1948 if (!is_cur_manual(master))
1949 flag = V4L2_CTRL_FLAG_INACTIVE |
1950 (set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0);
1952 for (i = 1; i < ncontrols; i++)
1954 controls[i]->flags |= flag;
1956 EXPORT_SYMBOL(v4l2_ctrl_auto_cluster);
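/*
 * Example autocluster: autogain with a manual gain control, where
 * manual mode is autogain value 0 and gain is volatile while autogain
 * is on. The control pointers must live in consecutive fields so that
 * &state->autogain can be passed as an array of two controls (the
 * struct and field names are hypothetical):
 *
 *	struct foo_state {
 *		struct v4l2_ctrl *autogain;
 *		struct v4l2_ctrl *gain;
 *	};
 *
 *	v4l2_ctrl_auto_cluster(2, &state->autogain, 0, true);
 */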
1959 * Obtain the current volatile values of an autocluster and mark them as new.
1962 void update_from_auto_cluster(struct v4l2_ctrl *master)
1966 for (i = 1; i < master->ncontrols; i++)
1967 cur_to_new(master->cluster[i]);
1968 if (!call_op(master, g_volatile_ctrl))
1969 for (i = 1; i < master->ncontrols; i++)
1970 if (master->cluster[i])
1971 master->cluster[i]->is_new = 1;
1975 * Return non-zero if one or more of the controls in the cluster has a new
1976 * value that differs from the current value.
1978 static int cluster_changed(struct v4l2_ctrl *master)
1980 bool changed = false;
1984 for (i = 0; i < master->ncontrols; i++) {
1985 struct v4l2_ctrl *ctrl = master->cluster[i];
1986 bool ctrl_changed = false;
1991 if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) {
1993 ctrl_changed = true;
1997 * Set has_changed to false to avoid generating
1998 * the event V4L2_EVENT_CTRL_CH_VALUE
2000 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) {
2001 ctrl->has_changed = false;
2005 if (ctrl->elems != ctrl->new_elems)
2006 ctrl_changed = true;
2008 for (idx = 0; !ctrl_changed && idx < ctrl->elems; idx++)
2009 ctrl_changed = !ctrl->type_ops->equal(ctrl, idx,
2010 ctrl->p_cur, ctrl->p_new);
2011 ctrl->has_changed = ctrl_changed;
2012 changed |= ctrl->has_changed;
2018 * Core function that calls try/s_ctrl and ensures that the new value is
2019 * copied to the current value on a set.
2020 * Must be called with ctrl->handler->lock held.
2022 int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master,
2023 bool set, u32 ch_flags)
2030 * Go through the cluster and either validate the new value or
2031 * (if no new value was set), copy the current value to the new
2032 * value, ensuring a consistent view for the control ops when called.
2035 for (i = 0; i < master->ncontrols; i++) {
2036 struct v4l2_ctrl *ctrl = master->cluster[i];
2041 if (!ctrl->is_new) {
2046 * Check again: it may have changed since the
2047 * previous check in try_or_set_ext_ctrls().
2049 if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED))
2053 ret = call_op(master, try_ctrl);
2055 /* Don't set if there is no change */
2056 if (ret || !set || !cluster_changed(master))
2058 ret = call_op(master, s_ctrl);
2062 /* If OK, then make the new values permanent. */
2063 update_flag = is_cur_manual(master) != is_new_manual(master);
2065 for (i = 0; i < master->ncontrols; i++) {
2067 * If we switch from auto to manual mode, and this cluster
2068 * contains volatile controls, then all non-master controls
2069 * have to be marked as changed. The 'new' value contains
2070 * the volatile value (obtained by update_from_auto_cluster),
2071 * which now has to become the current value.
2073 if (i && update_flag && is_new_manual(master) &&
2074 master->has_volatiles && master->cluster[i])
2075 master->cluster[i]->has_changed = true;
2077 new_to_cur(fh, master->cluster[i], ch_flags |
2078 ((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0));
2083 /* Activate/deactivate a control. */
2084 void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active)
2086 /* invert since the actual flag is called 'inactive' */
2087 bool inactive = !active;
2094 /* set V4L2_CTRL_FLAG_INACTIVE */
2095 old = test_and_set_bit(4, &ctrl->flags);
2097 /* clear V4L2_CTRL_FLAG_INACTIVE */
2098 old = test_and_clear_bit(4, &ctrl->flags);
2099 if (old != inactive)
2100 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
2102 EXPORT_SYMBOL(v4l2_ctrl_activate);
2104 void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed)
2111 lockdep_assert_held(ctrl->handler->lock);
2114 /* set V4L2_CTRL_FLAG_GRABBED */
2115 old = test_and_set_bit(1, &ctrl->flags);
2117 /* clear V4L2_CTRL_FLAG_GRABBED */
2118 old = test_and_clear_bit(1, &ctrl->flags);
2120 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
2122 EXPORT_SYMBOL(__v4l2_ctrl_grab);
2124 /* Call s_ctrl for all controls owned by the handler */
2125 int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
2127 struct v4l2_ctrl *ctrl;
2133 lockdep_assert_held(hdl->lock);
2135 list_for_each_entry(ctrl, &hdl->ctrls, node)
2138 list_for_each_entry(ctrl, &hdl->ctrls, node) {
2139 struct v4l2_ctrl *master = ctrl->cluster[0];
2142 /* Skip if this control was already handled by a cluster. */
2143 /* Skip button controls and read-only controls. */
2144 if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON ||
2145 (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY))
2148 for (i = 0; i < master->ncontrols; i++) {
2149 if (master->cluster[i]) {
2150 cur_to_new(master->cluster[i]);
2151 master->cluster[i]->is_new = 1;
2152 master->cluster[i]->done = true;
2155 ret = call_op(master, s_ctrl);
2162 EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup);
2164 int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
2171 mutex_lock(hdl->lock);
2172 ret = __v4l2_ctrl_handler_setup(hdl);
2173 mutex_unlock(hdl->lock);
2177 EXPORT_SYMBOL(v4l2_ctrl_handler_setup);
2179 /* Log the control name and value */
2180 static void log_ctrl(const struct v4l2_ctrl *ctrl,
2181 const char *prefix, const char *colon)
2183 if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY))
2185 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
2188 pr_info("%s%s%s: ", prefix, colon, ctrl->name);
2190 ctrl->type_ops->log(ctrl);
2192 if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE |
2193 V4L2_CTRL_FLAG_GRABBED |
2194 V4L2_CTRL_FLAG_VOLATILE)) {
2195 if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
2196 pr_cont(" inactive");
2197 if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)
2198 pr_cont(" grabbed");
2199 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE)
2200 pr_cont(" volatile");
2205 /* Log all controls owned by the handler */
2206 void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl,
2209 struct v4l2_ctrl *ctrl;
2210 const char *colon = "";
2217 len = strlen(prefix);
2218 if (len && prefix[len - 1] != ' ')
2220 mutex_lock(hdl->lock);
2221 list_for_each_entry(ctrl, &hdl->ctrls, node)
2222 if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED))
2223 log_ctrl(ctrl, prefix, colon);
2224 mutex_unlock(hdl->lock);
2226 EXPORT_SYMBOL(v4l2_ctrl_handler_log_status);
2228 int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl,
2229 const struct v4l2_ctrl_ops *ctrl_ops,
2230 const struct v4l2_fwnode_device_properties *p)
2232 if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) {
2233 u32 orientation_ctrl;
2235 switch (p->orientation) {
2236 case V4L2_FWNODE_ORIENTATION_FRONT:
2237 orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT;
2239 case V4L2_FWNODE_ORIENTATION_BACK:
2240 orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK;
2242 case V4L2_FWNODE_ORIENTATION_EXTERNAL:
2243 orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL;
2248 if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops,
2249 V4L2_CID_CAMERA_ORIENTATION,
2250 V4L2_CAMERA_ORIENTATION_EXTERNAL, 0,
2255 if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) {
2256 if (!v4l2_ctrl_new_std(hdl, ctrl_ops,
2257 V4L2_CID_CAMERA_SENSOR_ROTATION,
2258 p->rotation, p->rotation, 1,
2265 EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties);
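/*
 * A sensor driver would typically feed this with properties parsed from
 * the firmware node of its device (the foo names are hypothetical):
 *
 *	struct v4l2_fwnode_device_properties props;
 *
 *	ret = v4l2_fwnode_device_parse(&client->dev, &props);
 *	if (!ret)
 *		ret = v4l2_ctrl_new_fwnode_properties(&foo->ctrl_handler,
 *						      &foo_ctrl_ops, &props);
 */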