1 // SPDX-License-Identifier: GPL-2.0-only
5 * Development of this code funded by Astaro AG (http://www.astaro.com/)
8 #include <linux/kernel.h>
9 #include <linux/init.h>
10 #include <linux/module.h>
11 #include <linux/netlink.h>
12 #include <linux/netfilter.h>
13 #include <linux/netfilter/nf_tables.h>
14 #include <net/netfilter/nf_tables_core.h>
15 #include <net/netfilter/nf_tables.h>
16 #include <net/netfilter/nf_tables_offload.h>
21 enum nft_bitwise_ops op:8;
28 static void nft_bitwise_eval_bool(u32 *dst, const u32 *src,
29 const struct nft_bitwise *priv)
33 for (i = 0; i < DIV_ROUND_UP(priv->len, sizeof(u32)); i++)
34 dst[i] = (src[i] & priv->mask.data[i]) ^ priv->xor.data[i];
37 static void nft_bitwise_eval_lshift(u32 *dst, const u32 *src,
38 const struct nft_bitwise *priv)
40 u32 shift = priv->data.data[0];
44 for (i = DIV_ROUND_UP(priv->len, sizeof(u32)); i > 0; i--) {
45 dst[i - 1] = (src[i - 1] << shift) | carry;
46 carry = src[i - 1] >> (BITS_PER_TYPE(u32) - shift);
50 static void nft_bitwise_eval_rshift(u32 *dst, const u32 *src,
51 const struct nft_bitwise *priv)
53 u32 shift = priv->data.data[0];
57 for (i = 0; i < DIV_ROUND_UP(priv->len, sizeof(u32)); i++) {
58 dst[i] = carry | (src[i] >> shift);
59 carry = src[i] << (BITS_PER_TYPE(u32) - shift);
63 void nft_bitwise_eval(const struct nft_expr *expr,
64 struct nft_regs *regs, const struct nft_pktinfo *pkt)
66 const struct nft_bitwise *priv = nft_expr_priv(expr);
67 const u32 *src = ®s->data[priv->sreg];
68 u32 *dst = ®s->data[priv->dreg];
71 case NFT_BITWISE_BOOL:
72 nft_bitwise_eval_bool(dst, src, priv);
74 case NFT_BITWISE_LSHIFT:
75 nft_bitwise_eval_lshift(dst, src, priv);
77 case NFT_BITWISE_RSHIFT:
78 nft_bitwise_eval_rshift(dst, src, priv);
83 static const struct nla_policy nft_bitwise_policy[NFTA_BITWISE_MAX + 1] = {
84 [NFTA_BITWISE_SREG] = { .type = NLA_U32 },
85 [NFTA_BITWISE_DREG] = { .type = NLA_U32 },
86 [NFTA_BITWISE_LEN] = { .type = NLA_U32 },
87 [NFTA_BITWISE_MASK] = { .type = NLA_NESTED },
88 [NFTA_BITWISE_XOR] = { .type = NLA_NESTED },
89 [NFTA_BITWISE_OP] = { .type = NLA_U32 },
90 [NFTA_BITWISE_DATA] = { .type = NLA_NESTED },
93 static int nft_bitwise_init_bool(struct nft_bitwise *priv,
94 const struct nlattr *const tb[])
96 struct nft_data_desc mask = {
97 .type = NFT_DATA_VALUE,
98 .size = sizeof(priv->mask),
101 struct nft_data_desc xor = {
102 .type = NFT_DATA_VALUE,
103 .size = sizeof(priv->xor),
108 if (tb[NFTA_BITWISE_DATA])
111 if (!tb[NFTA_BITWISE_MASK] ||
112 !tb[NFTA_BITWISE_XOR])
115 err = nft_data_init(NULL, &priv->mask, &mask, tb[NFTA_BITWISE_MASK]);
119 err = nft_data_init(NULL, &priv->xor, &xor, tb[NFTA_BITWISE_XOR]);
126 nft_data_release(&priv->mask, mask.type);
131 static int nft_bitwise_init_shift(struct nft_bitwise *priv,
132 const struct nlattr *const tb[])
134 struct nft_data_desc desc = {
135 .type = NFT_DATA_VALUE,
136 .size = sizeof(priv->data),
141 if (tb[NFTA_BITWISE_MASK] ||
142 tb[NFTA_BITWISE_XOR])
145 if (!tb[NFTA_BITWISE_DATA])
148 err = nft_data_init(NULL, &priv->data, &desc, tb[NFTA_BITWISE_DATA]);
152 if (priv->data.data[0] >= BITS_PER_TYPE(u32)) {
153 nft_data_release(&priv->data, desc.type);
160 static int nft_bitwise_init(const struct nft_ctx *ctx,
161 const struct nft_expr *expr,
162 const struct nlattr * const tb[])
164 struct nft_bitwise *priv = nft_expr_priv(expr);
168 err = nft_parse_u32_check(tb[NFTA_BITWISE_LEN], U8_MAX, &len);
174 err = nft_parse_register_load(tb[NFTA_BITWISE_SREG], &priv->sreg,
179 err = nft_parse_register_store(ctx, tb[NFTA_BITWISE_DREG],
180 &priv->dreg, NULL, NFT_DATA_VALUE,
185 if (tb[NFTA_BITWISE_OP]) {
186 priv->op = ntohl(nla_get_be32(tb[NFTA_BITWISE_OP]));
188 case NFT_BITWISE_BOOL:
189 case NFT_BITWISE_LSHIFT:
190 case NFT_BITWISE_RSHIFT:
196 priv->op = NFT_BITWISE_BOOL;
200 case NFT_BITWISE_BOOL:
201 err = nft_bitwise_init_bool(priv, tb);
203 case NFT_BITWISE_LSHIFT:
204 case NFT_BITWISE_RSHIFT:
205 err = nft_bitwise_init_shift(priv, tb);
212 static int nft_bitwise_dump_bool(struct sk_buff *skb,
213 const struct nft_bitwise *priv)
215 if (nft_data_dump(skb, NFTA_BITWISE_MASK, &priv->mask,
216 NFT_DATA_VALUE, priv->len) < 0)
219 if (nft_data_dump(skb, NFTA_BITWISE_XOR, &priv->xor,
220 NFT_DATA_VALUE, priv->len) < 0)
226 static int nft_bitwise_dump_shift(struct sk_buff *skb,
227 const struct nft_bitwise *priv)
229 if (nft_data_dump(skb, NFTA_BITWISE_DATA, &priv->data,
230 NFT_DATA_VALUE, sizeof(u32)) < 0)
235 static int nft_bitwise_dump(struct sk_buff *skb,
236 const struct nft_expr *expr, bool reset)
238 const struct nft_bitwise *priv = nft_expr_priv(expr);
241 if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))
243 if (nft_dump_register(skb, NFTA_BITWISE_DREG, priv->dreg))
245 if (nla_put_be32(skb, NFTA_BITWISE_LEN, htonl(priv->len)))
247 if (nla_put_be32(skb, NFTA_BITWISE_OP, htonl(priv->op)))
251 case NFT_BITWISE_BOOL:
252 err = nft_bitwise_dump_bool(skb, priv);
254 case NFT_BITWISE_LSHIFT:
255 case NFT_BITWISE_RSHIFT:
256 err = nft_bitwise_dump_shift(skb, priv);
/* All-zero nft_data, compared against priv->xor in nft_bitwise_offload()
 * to ensure only pure mask (no XOR component) operations are offloaded.
 */
static struct nft_data zero;
265 static int nft_bitwise_offload(struct nft_offload_ctx *ctx,
266 struct nft_flow_rule *flow,
267 const struct nft_expr *expr)
269 const struct nft_bitwise *priv = nft_expr_priv(expr);
270 struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
272 if (priv->op != NFT_BITWISE_BOOL)
275 if (memcmp(&priv->xor, &zero, sizeof(priv->xor)) ||
276 priv->sreg != priv->dreg || priv->len != reg->len)
279 memcpy(®->mask, &priv->mask, sizeof(priv->mask));
284 static bool nft_bitwise_reduce(struct nft_regs_track *track,
285 const struct nft_expr *expr)
287 const struct nft_bitwise *priv = nft_expr_priv(expr);
288 const struct nft_bitwise *bitwise;
289 unsigned int regcount;
293 if (!track->regs[priv->sreg].selector)
296 bitwise = nft_expr_priv(track->regs[priv->dreg].selector);
297 if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&
298 track->regs[priv->sreg].num_reg == 0 &&
299 track->regs[priv->dreg].bitwise &&
300 track->regs[priv->dreg].bitwise->ops == expr->ops &&
301 priv->sreg == bitwise->sreg &&
302 priv->dreg == bitwise->dreg &&
303 priv->op == bitwise->op &&
304 priv->len == bitwise->len &&
305 !memcmp(&priv->mask, &bitwise->mask, sizeof(priv->mask)) &&
306 !memcmp(&priv->xor, &bitwise->xor, sizeof(priv->xor)) &&
307 !memcmp(&priv->data, &bitwise->data, sizeof(priv->data))) {
312 if (track->regs[priv->sreg].bitwise ||
313 track->regs[priv->sreg].num_reg != 0) {
314 nft_reg_track_cancel(track, priv->dreg, priv->len);
318 if (priv->sreg != priv->dreg) {
319 nft_reg_track_update(track, track->regs[priv->sreg].selector,
320 priv->dreg, priv->len);
324 regcount = DIV_ROUND_UP(priv->len, NFT_REG32_SIZE);
325 for (i = 0; i < regcount; i++, dreg++)
326 track->regs[priv->dreg].bitwise = expr;
331 static const struct nft_expr_ops nft_bitwise_ops = {
332 .type = &nft_bitwise_type,
333 .size = NFT_EXPR_SIZE(sizeof(struct nft_bitwise)),
334 .eval = nft_bitwise_eval,
335 .init = nft_bitwise_init,
336 .dump = nft_bitwise_dump,
337 .reduce = nft_bitwise_reduce,
338 .offload = nft_bitwise_offload,
342 nft_bitwise_extract_u32_data(const struct nlattr * const tb, u32 *out)
344 struct nft_data data;
345 struct nft_data_desc desc = {
346 .type = NFT_DATA_VALUE,
347 .size = sizeof(data),
352 err = nft_data_init(NULL, &data, &desc, tb);
361 static int nft_bitwise_fast_init(const struct nft_ctx *ctx,
362 const struct nft_expr *expr,
363 const struct nlattr * const tb[])
365 struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
368 err = nft_parse_register_load(tb[NFTA_BITWISE_SREG], &priv->sreg,
373 err = nft_parse_register_store(ctx, tb[NFTA_BITWISE_DREG], &priv->dreg,
374 NULL, NFT_DATA_VALUE, sizeof(u32));
378 if (tb[NFTA_BITWISE_DATA])
381 if (!tb[NFTA_BITWISE_MASK] ||
382 !tb[NFTA_BITWISE_XOR])
385 err = nft_bitwise_extract_u32_data(tb[NFTA_BITWISE_MASK], &priv->mask);
389 err = nft_bitwise_extract_u32_data(tb[NFTA_BITWISE_XOR], &priv->xor);
397 nft_bitwise_fast_dump(struct sk_buff *skb,
398 const struct nft_expr *expr, bool reset)
400 const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
401 struct nft_data data;
403 if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))
405 if (nft_dump_register(skb, NFTA_BITWISE_DREG, priv->dreg))
407 if (nla_put_be32(skb, NFTA_BITWISE_LEN, htonl(sizeof(u32))))
409 if (nla_put_be32(skb, NFTA_BITWISE_OP, htonl(NFT_BITWISE_BOOL)))
412 data.data[0] = priv->mask;
413 if (nft_data_dump(skb, NFTA_BITWISE_MASK, &data,
414 NFT_DATA_VALUE, sizeof(u32)) < 0)
417 data.data[0] = priv->xor;
418 if (nft_data_dump(skb, NFTA_BITWISE_XOR, &data,
419 NFT_DATA_VALUE, sizeof(u32)) < 0)
425 static int nft_bitwise_fast_offload(struct nft_offload_ctx *ctx,
426 struct nft_flow_rule *flow,
427 const struct nft_expr *expr)
429 const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
430 struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
432 if (priv->xor || priv->sreg != priv->dreg || reg->len != sizeof(u32))
435 reg->mask.data[0] = priv->mask;
439 static bool nft_bitwise_fast_reduce(struct nft_regs_track *track,
440 const struct nft_expr *expr)
442 const struct nft_bitwise_fast_expr *priv = nft_expr_priv(expr);
443 const struct nft_bitwise_fast_expr *bitwise;
445 if (!track->regs[priv->sreg].selector)
448 bitwise = nft_expr_priv(track->regs[priv->dreg].selector);
449 if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&
450 track->regs[priv->dreg].bitwise &&
451 track->regs[priv->dreg].bitwise->ops == expr->ops &&
452 priv->sreg == bitwise->sreg &&
453 priv->dreg == bitwise->dreg &&
454 priv->mask == bitwise->mask &&
455 priv->xor == bitwise->xor) {
460 if (track->regs[priv->sreg].bitwise) {
461 nft_reg_track_cancel(track, priv->dreg, NFT_REG32_SIZE);
465 if (priv->sreg != priv->dreg) {
466 track->regs[priv->dreg].selector =
467 track->regs[priv->sreg].selector;
469 track->regs[priv->dreg].bitwise = expr;
474 const struct nft_expr_ops nft_bitwise_fast_ops = {
475 .type = &nft_bitwise_type,
476 .size = NFT_EXPR_SIZE(sizeof(struct nft_bitwise_fast_expr)),
477 .eval = NULL, /* inlined */
478 .init = nft_bitwise_fast_init,
479 .dump = nft_bitwise_fast_dump,
480 .reduce = nft_bitwise_fast_reduce,
481 .offload = nft_bitwise_fast_offload,
484 static const struct nft_expr_ops *
485 nft_bitwise_select_ops(const struct nft_ctx *ctx,
486 const struct nlattr * const tb[])
491 if (!tb[NFTA_BITWISE_LEN] ||
492 !tb[NFTA_BITWISE_SREG] ||
493 !tb[NFTA_BITWISE_DREG])
494 return ERR_PTR(-EINVAL);
496 err = nft_parse_u32_check(tb[NFTA_BITWISE_LEN], U8_MAX, &len);
500 if (len != sizeof(u32))
501 return &nft_bitwise_ops;
503 if (tb[NFTA_BITWISE_OP] &&
504 ntohl(nla_get_be32(tb[NFTA_BITWISE_OP])) != NFT_BITWISE_BOOL)
505 return &nft_bitwise_ops;
507 return &nft_bitwise_fast_ops;
510 struct nft_expr_type nft_bitwise_type __read_mostly = {
512 .select_ops = nft_bitwise_select_ops,
513 .policy = nft_bitwise_policy,
514 .maxattr = NFTA_BITWISE_MAX,
515 .owner = THIS_MODULE,
518 bool nft_expr_reduce_bitwise(struct nft_regs_track *track,
519 const struct nft_expr *expr)
521 const struct nft_expr *last = track->last;
522 const struct nft_expr *next;
527 next = nft_expr_next(expr);
528 if (next->ops == &nft_bitwise_ops)
529 return nft_bitwise_reduce(track, next);
530 else if (next->ops == &nft_bitwise_fast_ops)
531 return nft_bitwise_fast_reduce(track, next);
535 EXPORT_SYMBOL_GPL(nft_expr_reduce_bitwise);