Lines matching references to tb, the netlink attribute table used to decode the FQ qdisc options:
227 struct rtattr *tb[TCA_FQ_MAX + 1];
240 parse_rtattr_nested(tb, TCA_FQ_MAX, opt);
242 if (tb[TCA_FQ_PLIMIT] &&
243 RTA_PAYLOAD(tb[TCA_FQ_PLIMIT]) >= sizeof(__u32)) {
244 plimit = rta_getattr_u32(tb[TCA_FQ_PLIMIT]);
247 if (tb[TCA_FQ_FLOW_PLIMIT] &&
248 RTA_PAYLOAD(tb[TCA_FQ_FLOW_PLIMIT]) >= sizeof(__u32)) {
249 flow_plimit = rta_getattr_u32(tb[TCA_FQ_FLOW_PLIMIT]);
252 if (tb[TCA_FQ_BUCKETS_LOG] &&
253 RTA_PAYLOAD(tb[TCA_FQ_BUCKETS_LOG]) >= sizeof(__u32)) {
254 buckets_log = rta_getattr_u32(tb[TCA_FQ_BUCKETS_LOG]);
257 if (tb[TCA_FQ_ORPHAN_MASK] &&
258 RTA_PAYLOAD(tb[TCA_FQ_ORPHAN_MASK]) >= sizeof(__u32)) {
259 orphan_mask = rta_getattr_u32(tb[TCA_FQ_ORPHAN_MASK]);
262 if (tb[TCA_FQ_RATE_ENABLE] &&
263 RTA_PAYLOAD(tb[TCA_FQ_RATE_ENABLE]) >= sizeof(int)) {
264 pacing = rta_getattr_u32(tb[TCA_FQ_RATE_ENABLE]);
268 if (tb[TCA_FQ_QUANTUM] &&
269 RTA_PAYLOAD(tb[TCA_FQ_QUANTUM]) >= sizeof(__u32)) {
270 quantum = rta_getattr_u32(tb[TCA_FQ_QUANTUM]);
273 if (tb[TCA_FQ_INITIAL_QUANTUM] &&
274 RTA_PAYLOAD(tb[TCA_FQ_INITIAL_QUANTUM]) >= sizeof(__u32)) {
275 quantum = rta_getattr_u32(tb[TCA_FQ_INITIAL_QUANTUM]);
278 if (tb[TCA_FQ_FLOW_MAX_RATE] &&
279 RTA_PAYLOAD(tb[TCA_FQ_FLOW_MAX_RATE]) >= sizeof(__u32)) {
280 rate = rta_getattr_u32(tb[TCA_FQ_FLOW_MAX_RATE]);
285 if (tb[TCA_FQ_FLOW_DEFAULT_RATE] &&
286 RTA_PAYLOAD(tb[TCA_FQ_FLOW_DEFAULT_RATE]) >= sizeof(__u32)) {
287 rate = rta_getattr_u32(tb[TCA_FQ_FLOW_DEFAULT_RATE]);
292 if (tb[TCA_FQ_LOW_RATE_THRESHOLD] &&
293 RTA_PAYLOAD(tb[TCA_FQ_LOW_RATE_THRESHOLD]) >= sizeof(__u32)) {
294 rate = rta_getattr_u32(tb[TCA_FQ_LOW_RATE_THRESHOLD]);
299 if (tb[TCA_FQ_FLOW_REFILL_DELAY] &&
300 RTA_PAYLOAD(tb[TCA_FQ_FLOW_REFILL_DELAY]) >= sizeof(__u32)) {
301 refill_delay = rta_getattr_u32(tb[TCA_FQ_FLOW_REFILL_DELAY]);
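The listing above is the guarded decode path for the FQ qdisc's netlink options: parse_rtattr_nested() indexes the attributes nested inside TCA_OPTIONS into tb[], and each value is read only after checking both that the attribute is present and that its payload is at least sizeof(__u32), so a missing or truncated attribute is skipped rather than read out of bounds. The sketch below reproduces that pattern using only the uapi macros from <linux/rtnetlink.h> and <linux/pkt_sched.h>; walk_nested(), read_u32(), add_u32() and the hand-built attribute blob in main() are illustrative stand-ins, not iproute2 API (iproute2 itself uses parse_rtattr_nested() and rta_getattr_u32(), as shown above).

#include <stdio.h>
#include <string.h>
#include <linux/types.h>
#include <linux/netlink.h>
#include <linux/rtnetlink.h>
#include <linux/pkt_sched.h>

/* Index the attributes nested inside 'opt' into tb[], in the spirit of
 * parse_rtattr_nested(). */
static void walk_nested(struct rtattr *tb[], int max, struct rtattr *opt)
{
	struct rtattr *rta = RTA_DATA(opt);	/* nested attributes live in the payload */
	int len = RTA_PAYLOAD(opt);

	memset(tb, 0, sizeof(struct rtattr *) * (max + 1));
	for (; RTA_OK(rta, len); rta = RTA_NEXT(rta, len)) {
		unsigned short type = rta->rta_type & ~NLA_F_NESTED;	/* strip the nest flag */

		if (type <= max)
			tb[type] = rta;
	}
}

/* Read a __u32 attribute only if it is present and its payload is large
 * enough -- the same RTA_PAYLOAD() >= sizeof(__u32) guard used in the listing. */
static int read_u32(struct rtattr *tb[], int type, __u32 *val)
{
	if (tb[type] && RTA_PAYLOAD(tb[type]) >= (int)sizeof(__u32)) {
		*val = *(__u32 *)RTA_DATA(tb[type]);
		return 1;
	}
	return 0;
}

/* Decode a few TCA_FQ_* options from a TCA_OPTIONS container attribute. */
static void print_fq_opts(struct rtattr *opt)
{
	struct rtattr *tb[TCA_FQ_MAX + 1];
	__u32 plimit, flow_plimit, quantum;

	walk_nested(tb, TCA_FQ_MAX, opt);

	if (read_u32(tb, TCA_FQ_PLIMIT, &plimit))
		printf("limit %up ", plimit);
	if (read_u32(tb, TCA_FQ_FLOW_PLIMIT, &flow_plimit))
		printf("flow_limit %up ", flow_plimit);
	if (read_u32(tb, TCA_FQ_QUANTUM, &quantum))
		printf("quantum %u ", quantum);
	printf("\n");
}

/* Append one __u32 attribute to the nest and grow the container's length. */
static void add_u32(struct rtattr *opt, unsigned short type, __u32 value)
{
	struct rtattr *rta = (struct rtattr *)((char *)opt + RTA_ALIGN(opt->rta_len));

	rta->rta_type = type;
	rta->rta_len = RTA_LENGTH(sizeof(value));
	memcpy(RTA_DATA(rta), &value, sizeof(value));
	opt->rta_len = RTA_ALIGN(opt->rta_len) + rta->rta_len;
}

int main(void)
{
	__u32 buf[32] = { 0 };	/* __u32-aligned scratch space for a fake attribute blob */
	struct rtattr *opt = (struct rtattr *)buf;

	/* Hand-build a TCA_OPTIONS container with three nested FQ attributes,
	 * standing in for what a real qdisc dump reply would carry. */
	opt->rta_type = TCA_OPTIONS;
	opt->rta_len = RTA_LENGTH(0);
	add_u32(opt, TCA_FQ_PLIMIT, 10000);
	add_u32(opt, TCA_FQ_FLOW_PLIMIT, 100);
	add_u32(opt, TCA_FQ_QUANTUM, 3028);

	print_fq_opts(opt);	/* prints: limit 10000p flow_limit 100p quantum 3028 */
	return 0;
}

In real tooling the TCA_OPTIONS attribute would come from an RTM_GETQDISC dump reply rather than being built by hand; the point of the sketch is only the present-and-large-enough guard applied before every read.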