Searched refs:cap_mask (Results 1 – 25 of 129) sorted by relevance

/linux-6.12.1/tools/testing/selftests/bpf/prog_tests/
deny_namespace.c
51 __u32 cap_mask = 1ULL << CAP_SYS_ADMIN; in test_userns_create_bpf() local
54 cap_enable_effective(cap_mask, &old_caps); in test_userns_create_bpf()
58 cap_disable_effective(cap_mask, &old_caps); in test_userns_create_bpf()
62 if (cap_mask & old_caps) in test_userns_create_bpf()
63 cap_enable_effective(cap_mask, NULL); in test_userns_create_bpf()
68 __u32 cap_mask = 1ULL << CAP_SYS_ADMIN; in test_unpriv_userns_create_no_bpf() local
71 cap_disable_effective(cap_mask, &old_caps); in test_unpriv_userns_create_no_bpf()
75 if (cap_mask & old_caps) in test_unpriv_userns_create_no_bpf()
76 cap_enable_effective(cap_mask, NULL); in test_unpriv_userns_create_no_bpf()
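
The selftest above toggles CAP_SYS_ADMIN in the caller's effective set around the operation under test and restores the original state afterwards. A minimal sketch of that pattern, assuming the cap_enable_effective()/cap_disable_effective() helpers from the BPF selftests' cap_helpers.h with the (__u64 caps, __u64 *old_caps) signature implied by the hits:

#include <linux/types.h>
#include <linux/capability.h>
#include "cap_helpers.h"	/* selftest helper header; assumed to declare the two calls below */

static int run_with_cap_sys_admin(int (*fn)(void))
{
	__u64 cap_mask = 1ULL << CAP_SYS_ADMIN;
	__u64 old_caps = 0;
	int err;

	/* Raise CAP_SYS_ADMIN in the effective set, remembering the old state. */
	err = cap_enable_effective(cap_mask, &old_caps);
	if (err)
		return err;

	err = fn();

	/* Drop the bit again only if the caller did not have it to begin with. */
	if (!(cap_mask & old_caps))
		cap_disable_effective(cap_mask, NULL);

	return err;
}
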
/linux-6.12.1/drivers/dma/
dmaengine.c
328 if (!dma_has_cap(cap, device->cap_mask) || in min_chan()
329 dma_has_cap(DMA_PRIVATE, device->cap_mask)) in min_chan()
374 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_channel_rebalance()
397 bitmap_and(has.bits, want->bits, device->cap_mask.bits, in dma_device_satisfies_mask()
476 if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask)) in dma_chan_get()
561 if (dma_has_cap(DMA_PRIVATE, device->cap_mask)) in dma_issue_pending_all()
581 if (!(test_bit(DMA_SLAVE, device->cap_mask.bits) || in dma_get_slave_caps()
582 test_bit(DMA_CYCLIC, device->cap_mask.bits))) in dma_get_slave_caps()
632 if (dev->chancnt > 1 && !dma_has_cap(DMA_PRIVATE, dev->cap_mask)) in private_candidate()
669 dma_cap_set(DMA_PRIVATE, device->cap_mask); in find_candidate()
[all …]
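
These dmaengine.c hits are the core's side of the contract: dma_device_satisfies_mask() ANDs a requester's wanted mask with each device's cap_mask, and DMA_PRIVATE marks devices that are kept out of the public channel pool. On the consumer side, the wanted mask is built with the same helpers; a minimal sketch of requesting any memcpy-capable channel (no filter function):

#include <linux/dmaengine.h>

static struct dma_chan *get_memcpy_chan(void)
{
	dma_cap_mask_t mask;

	/* Build the wanted capability mask: any channel advertising DMA_MEMCPY. */
	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);

	/* The core compares this mask against each dma_device's cap_mask. */
	return dma_request_channel(mask, NULL, NULL);	/* NULL if nothing matches */
}

The caller is expected to hand the channel back with dma_release_channel() once it is done with it.
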
mv_xor.c
1036 int idx, dma_cap_mask_t cap_mask, int irq) in mv_xor_channel_add() argument
1078 dma_dev->cap_mask = cap_mask; in mv_xor_channel_add()
1089 if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask)) in mv_xor_channel_add()
1091 if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) in mv_xor_channel_add()
1093 if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) { in mv_xor_channel_add()
1127 if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) { in mv_xor_channel_add()
1134 if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) { in mv_xor_channel_add()
1143 dma_has_cap(DMA_XOR, dma_dev->cap_mask) ? "xor " : "", in mv_xor_channel_add()
1144 dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask) ? "cpy " : "", in mv_xor_channel_add()
1145 dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask) ? "intr " : ""); in mv_xor_channel_add()
[all …]
at_hdmac.c
518 dma_cap_mask_t cap_mask; member
1947 dma_cap_set(DMA_MEMCPY, at91sam9rl_config.cap_mask); in at_dma_probe()
1948 dma_cap_set(DMA_INTERLEAVE, at91sam9g45_config.cap_mask); in at_dma_probe()
1949 dma_cap_set(DMA_MEMCPY, at91sam9g45_config.cap_mask); in at_dma_probe()
1950 dma_cap_set(DMA_MEMSET, at91sam9g45_config.cap_mask); in at_dma_probe()
1951 dma_cap_set(DMA_MEMSET_SG, at91sam9g45_config.cap_mask); in at_dma_probe()
1952 dma_cap_set(DMA_PRIVATE, at91sam9g45_config.cap_mask); in at_dma_probe()
1953 dma_cap_set(DMA_SLAVE, at91sam9g45_config.cap_mask); in at_dma_probe()
1975 atdma->dma_device.cap_mask = plat_dat->cap_mask; in at_dma_probe()
2043 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask)) in at_dma_probe()
[all …]
bcm-sba-raid.c
1553 dma_cap_zero(dma_dev->cap_mask); in sba_async_register()
1554 dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in sba_async_register()
1555 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in sba_async_register()
1556 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in sba_async_register()
1557 dma_cap_set(DMA_PQ, dma_dev->cap_mask); in sba_async_register()
1573 if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask)) in sba_async_register()
1577 if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) in sba_async_register()
1581 if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) { in sba_async_register()
1587 if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) { in sba_async_register()
1605 dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask) ? "interrupt " : "", in sba_async_register()
[all …]
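
The probe-time pattern repeated across these drivers is: zero the device's cap_mask, set one capability bit per supported operation, install a prep callback only for the bits that were set, then register the device. A condensed sketch with a hypothetical foo_ driver and only the memcpy path filled in; a real driver also populates channels and the mandatory issue_pending/tx_status callbacks before dma_async_device_register() will succeed:

#include <linux/dmaengine.h>

struct foo_dma_dev {
	struct dma_device dma_dev;	/* hypothetical per-device state */
};

static struct dma_async_tx_descriptor *
foo_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		size_t len, unsigned long flags)
{
	return NULL;	/* a real driver builds and returns a descriptor here */
}

static int foo_register(struct foo_dma_dev *foo, struct device *dev)
{
	struct dma_device *dma_dev = &foo->dma_dev;

	dma_dev->dev = dev;

	/* Advertise capabilities: start from an empty mask, then set bits. */
	dma_cap_zero(dma_dev->cap_mask);
	dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask);

	/* Install callbacks only for capabilities that were actually set. */
	if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask))
		dma_dev->device_prep_dma_memcpy = foo_prep_memcpy;

	return dma_async_device_register(dma_dev);
}
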
mcf-edma-main.c
213 dma_cap_set(DMA_PRIVATE, mcf_edma->dma_dev.cap_mask); in mcf_edma_probe()
214 dma_cap_set(DMA_SLAVE, mcf_edma->dma_dev.cap_mask); in mcf_edma_probe()
215 dma_cap_set(DMA_CYCLIC, mcf_edma->dma_dev.cap_mask); in mcf_edma_probe()
xgene-dma.c
1489 dma_cap_zero(dma_dev->cap_mask); in xgene_dma_set_caps()
1504 dma_cap_set(DMA_PQ, dma_dev->cap_mask); in xgene_dma_set_caps()
1505 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in xgene_dma_set_caps()
1508 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in xgene_dma_set_caps()
1518 if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) { in xgene_dma_set_caps()
1524 if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) { in xgene_dma_set_caps()
1568 dma_has_cap(DMA_XOR, dma_dev->cap_mask) ? "XOR " : "", in xgene_dma_async_register()
1569 dma_has_cap(DMA_PQ, dma_dev->cap_mask) ? "PQ " : ""); in xgene_dma_async_register()
mv_xor_v2.c
814 dma_cap_zero(dma_dev->cap_mask); in mv_xor_v2_probe()
815 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in mv_xor_v2_probe()
816 dma_cap_set(DMA_XOR, dma_dev->cap_mask); in mv_xor_v2_probe()
817 dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in mv_xor_v2_probe()
ep93xx_dma.c
1450 dma_cap_mask_t mask = edma->dma_dev.cap_mask; in ep93xx_m2p_dma_of_xlate()
1481 dma_cap_mask_t mask = edma->dma_dev.cap_mask; in ep93xx_m2m_dma_of_xlate()
1515 dma_cap_zero(dma_dev->cap_mask); in ep93xx_dma_probe()
1516 dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in ep93xx_dma_probe()
1517 dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask); in ep93xx_dma_probe()
1533 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in ep93xx_dma_probe()
1541 dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask); in ep93xx_dma_probe()
dmatest.c
858 dev->cap_mask) && in dmatest_func()
1037 if (dma_has_cap(DMA_COMPLETION_NO_ORDER, dma_dev->cap_mask) && in dmatest_add_channel()
1043 if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) { in dmatest_add_channel()
1050 if (dma_has_cap(DMA_MEMSET, dma_dev->cap_mask)) { in dmatest_add_channel()
1057 if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) { in dmatest_add_channel()
1061 if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) { in dmatest_add_channel()
/linux-6.12.1/drivers/dma/ioat/
sysfs.c
22 dma_has_cap(DMA_PQ, dma->cap_mask) ? " pq" : "", in cap_show()
23 dma_has_cap(DMA_PQ_VAL, dma->cap_mask) ? " pq_val" : "", in cap_show()
24 dma_has_cap(DMA_XOR, dma->cap_mask) ? " xor" : "", in cap_show()
25 dma_has_cap(DMA_XOR_VAL, dma->cap_mask) ? " xor_val" : "", in cap_show()
26 dma_has_cap(DMA_INTERRUPT, dma->cap_mask) ? " intr" : ""); in cap_show()
init.c
512 dma_cap_set(DMA_MEMCPY, dma->cap_mask); in ioat_probe()
795 if (!dma_has_cap(DMA_XOR, dma->cap_mask)) in ioat_xor_val_self_test()
903 if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask)) in ioat_xor_val_self_test()
1092 dma_cap_set(DMA_INTERRUPT, dma->cap_mask); in ioat3_dma_probe()
1108 dma_cap_set(DMA_XOR, dma->cap_mask); in ioat3_dma_probe()
1111 dma_cap_set(DMA_XOR_VAL, dma->cap_mask); in ioat3_dma_probe()
1119 dma_cap_set(DMA_PQ, dma->cap_mask); in ioat3_dma_probe()
1120 dma_cap_set(DMA_PQ_VAL, dma->cap_mask); in ioat3_dma_probe()
1130 dma_cap_set(DMA_XOR, dma->cap_mask); in ioat3_dma_probe()
1131 dma_cap_set(DMA_XOR_VAL, dma->cap_mask); in ioat3_dma_probe()
[all …]
/linux-6.12.1/arch/x86/kernel/cpu/
transmeta.c
24 unsigned int cap_mask, uk, max, dummy; in init_transmeta() local
86 rdmsr(0x80860004, cap_mask, uk); in init_transmeta()
89 wrmsr(0x80860004, cap_mask, uk); in init_transmeta()
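
Here cap_mask is unrelated to dmaengine: it is the low half of the Transmeta CPUID capability-mask MSR (0x80860004), which controls which CPUID feature bits the CPU exposes; the kernel reads it with rdmsr() and writes it back with wrmsr(). A hedged sketch of a read-modify-write on that MSR (whether the kernel hides or unhides bits at this point is not shown by the snippet, so the bit manipulation below is illustrative only):

#include <linux/init.h>
#include <asm/msr.h>

#define TM_CPUID_CAP_MSR	0x80860004	/* Transmeta CPUID capability mask, per the hit above */

static void __init tm_mask_feature(unsigned int feature_bit)
{
	unsigned int cap_mask, uk;

	/* Low word holds the exposed CPUID feature bits; the high word is preserved. */
	rdmsr(TM_CPUID_CAP_MSR, cap_mask, uk);
	cap_mask &= ~(1U << feature_bit);	/* hide the chosen feature from CPUID */
	wrmsr(TM_CPUID_CAP_MSR, cap_mask, uk);
}
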
/linux-6.12.1/tools/testing/selftests/bpf/progs/
test_deny_namespace.c
20 __u64 cap_mask = 1ULL << CAP_SYS_ADMIN; in BPF_PROG() local
26 if (caps.val & cap_mask) in BPF_PROG()
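
This is the BPF side of the deny_namespace selftest: an LSM program checks whether CAP_SYS_ADMIN is present in the creating task's effective capability set and rejects the operation otherwise. A sketch modeled on the hit above; the userns_create hook name, the section name, and the direct cred->cap_effective access are assumptions based on the snippet, not verified here:

// SPDX-License-Identifier: GPL-2.0
#include "vmlinux.h"
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>

#ifndef CAP_SYS_ADMIN
#define CAP_SYS_ADMIN 21	/* value from uapi linux/capability.h */
#endif

char LICENSE[] SEC("license") = "GPL";

/* Deny user-namespace creation unless the caller holds CAP_SYS_ADMIN. */
SEC("lsm/userns_create")
int BPF_PROG(deny_unpriv_userns, const struct cred *cred)
{
	__u64 cap_mask = 1ULL << CAP_SYS_ADMIN;
	kernel_cap_t caps = cred->cap_effective;

	return (caps.val & cap_mask) ? 0 : -EPERM;
}
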
/linux-6.12.1/arch/arm/plat-orion/
common.c
614 dma_cap_set(DMA_MEMCPY, orion_xor0_channels_data[0].cap_mask); in orion_xor0_init()
615 dma_cap_set(DMA_XOR, orion_xor0_channels_data[0].cap_mask); in orion_xor0_init()
617 dma_cap_set(DMA_MEMCPY, orion_xor0_channels_data[1].cap_mask); in orion_xor0_init()
618 dma_cap_set(DMA_XOR, orion_xor0_channels_data[1].cap_mask); in orion_xor0_init()
675 dma_cap_set(DMA_MEMCPY, orion_xor1_channels_data[0].cap_mask); in orion_xor1_init()
676 dma_cap_set(DMA_XOR, orion_xor1_channels_data[0].cap_mask); in orion_xor1_init()
678 dma_cap_set(DMA_MEMCPY, orion_xor1_channels_data[1].cap_mask); in orion_xor1_init()
679 dma_cap_set(DMA_XOR, orion_xor1_channels_data[1].cap_mask); in orion_xor1_init()
/linux-6.12.1/crypto/async_tx/
async_tx.c
50 dma_has_cap(tx_type, depend_tx->chan->device->cap_mask)) in __async_tx_find_channel()
89 if (dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in async_tx_channel_switch()
235 if (device && !dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in async_trigger_callback()
/linux-6.12.1/drivers/dma/idxd/
dma.c
212 dma_cap_set(DMA_INTERRUPT, dma->cap_mask); in idxd_register_dma_device()
213 dma_cap_set(DMA_PRIVATE, dma->cap_mask); in idxd_register_dma_device()
214 dma_cap_set(DMA_COMPLETION_NO_ORDER, dma->cap_mask); in idxd_register_dma_device()
219 dma_cap_set(DMA_MEMCPY, dma->cap_mask); in idxd_register_dma_device()
/linux-6.12.1/drivers/infiniband/hw/hfi1/
driver.c
50 module_param_cb(cap_mask, &cap_ops, &hfi1_cap_mask, S_IWUSR | S_IRUGO);
51 MODULE_PARM_DESC(cap_mask, "Bit mask of enabled/disabled HW features");
73 cap_mask = *cap_mask_ptr, value, diff, in hfi1_caps_set() local
83 diff = value ^ (cap_mask & ~HFI1_CAP_LOCKED_SMASK); in hfi1_caps_set()
95 cap_mask &= ~diff; in hfi1_caps_set()
97 cap_mask |= (value & diff); in hfi1_caps_set()
99 diff = (cap_mask & (HFI1_CAP_MUST_HAVE_KERN << HFI1_CAP_USER_SHIFT)) ^ in hfi1_caps_set()
100 ((cap_mask & HFI1_CAP_MUST_HAVE_KERN) << HFI1_CAP_USER_SHIFT); in hfi1_caps_set()
101 cap_mask &= ~diff; in hfi1_caps_set()
103 *cap_mask_ptr = cap_mask; in hfi1_caps_set()
[all …]
/linux-6.12.1/include/linux/platform_data/
dma-mv_xor.h
15 dma_cap_mask_t cap_mask; member
dma-iop32x.h
102 dma_cap_mask_t cap_mask; member
/linux-6.12.1/drivers/dma/ptdma/
ptdma-dmaengine.c
360 dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in pt_dmaengine_register()
361 dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in pt_dmaengine_register()
367 dma_cap_set(DMA_PRIVATE, dma_dev->cap_mask); in pt_dmaengine_register()
/linux-6.12.1/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_minidump.c
295 hdr->drv_cap_mask = hdr->cap_mask; in qlcnic_82xx_cache_tmpl_hdr_values()
296 fw_dump->cap_mask = hdr->cap_mask; in qlcnic_82xx_cache_tmpl_hdr_values()
349 hdr->drv_cap_mask = hdr->cap_mask; in qlcnic_83xx_cache_tmpl_hdr_values()
350 fw_dump->cap_mask = hdr->cap_mask; in qlcnic_83xx_cache_tmpl_hdr_values()
1278 fw_dump->cap_mask); in qlcnic_fw_cmd_get_minidump_temp()
1322 if (i & fw_dump->cap_mask) in qlcnic_dump_fw()
1353 if (!(entry->hdr.mask & fw_dump->cap_mask)) { in qlcnic_dump_fw()
1450 fw_dump->cap_mask = 0x1f; in qlcnic_83xx_get_minidump_template()
/linux-6.12.1/sound/soc/qcom/qdsp6/
q6afe-dai.c
128 unsigned int cap_mask; in q6tdm_set_tdm_slot() local
141 cap_mask = 0x03; in q6tdm_set_tdm_slot()
144 cap_mask = 0x0F; in q6tdm_set_tdm_slot()
147 cap_mask = 0xFF; in q6tdm_set_tdm_slot()
150 cap_mask = 0xFFFF; in q6tdm_set_tdm_slot()
163 tdm->slot_mask = ((dai->id & 0x1) ? tx_mask : rx_mask) & cap_mask; in q6tdm_set_tdm_slot()
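
In the qdsp6 TDM code, cap_mask is a slot-width limit: the switch above picks 0x03/0x0F/0xFF/0xFFFF (presumably for 2/4/8/16 configured slots), and the requested tx/rx slot mask is then ANDed with it. That is the usual "(1 << slots) - 1" construction; a small illustrative helper (name hypothetical):

#include <linux/bits.h>

/* Clamp a requested TDM slot mask to the number of supported slots. */
static unsigned int tdm_clamp_slot_mask(unsigned int req_mask, unsigned int slots)
{
	unsigned int cap_mask = GENMASK(slots - 1, 0);	/* e.g. 8 slots -> 0xFF */

	return req_mask & cap_mask;
}
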
/linux-6.12.1/drivers/dma/ppc4xx/
adma.c
1400 if (dma_has_cap(cap, ref->chan->device->cap_mask)) { in ppc440spe_async_tx_find_best_channel()
3772 dma_cap_set(DMA_MEMCPY, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3773 dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3774 dma_cap_set(DMA_PQ, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3775 dma_cap_set(DMA_PQ_VAL, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3776 dma_cap_set(DMA_XOR_VAL, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3779 dma_cap_set(DMA_XOR, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3780 dma_cap_set(DMA_PQ, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3781 dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask); in ppc440spe_adma_init_capabilities()
3782 adev->common.cap_mask = adev->common.cap_mask; in ppc440spe_adma_init_capabilities()
[all …]
/linux-6.12.1/drivers/dma/xilinx/
xdma.c
1261 dma_cap_set(DMA_SLAVE, xdev->dma_dev.cap_mask); in xdma_probe()
1262 dma_cap_set(DMA_PRIVATE, xdev->dma_dev.cap_mask); in xdma_probe()
1263 dma_cap_set(DMA_CYCLIC, xdev->dma_dev.cap_mask); in xdma_probe()
1264 dma_cap_set(DMA_INTERLEAVE, xdev->dma_dev.cap_mask); in xdma_probe()
1265 dma_cap_set(DMA_REPEAT, xdev->dma_dev.cap_mask); in xdma_probe()
1266 dma_cap_set(DMA_LOAD_EOT, xdev->dma_dev.cap_mask); in xdma_probe()
