/linux-6.12.1/include/asm-generic/percpu.h
  65  #define raw_cpu_generic_read(pcp) \    [argument]
  67  *raw_cpu_ptr(&(pcp)); \
  70  #define raw_cpu_generic_to_op(pcp, val, op) \    [argument]
  72  *raw_cpu_ptr(&(pcp)) op val; \
  75  #define raw_cpu_generic_add_return(pcp, val) \    [argument]
  77  typeof(pcp) *__p = raw_cpu_ptr(&(pcp)); \
  83  #define raw_cpu_generic_xchg(pcp, nval) \    [argument]
  85  typeof(pcp) *__p = raw_cpu_ptr(&(pcp)); \
  86  typeof(pcp) __ret; \
  92  #define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg) \    [argument]
  [all …]
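The raw_cpu_generic_*() fallbacks above operate through raw_cpu_ptr() on the current CPU's copy and leave preemption/interrupt protection to the caller (the this_cpu_generic_* wrappers add it). The search view truncates the macro bodies; a minimal sketch of the add-return fallback, reconstructed from the visible lines rather than copied verbatim, is:

/* Sketch: take the address of this CPU's copy, apply the operation
 * through that pointer, and hand back the new value.
 */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
									\
	*__p += val;							\
	*__p;								\
})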
/linux-6.12.1/arch/s390/include/asm/percpu.h
  28  #define arch_this_cpu_to_op_simple(pcp, val, op) \    [argument]
  30  typedef typeof(pcp) pcp_op_T__; \
  34  ptr__ = raw_cpu_ptr(&(pcp)); \
  45  #define this_cpu_add_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)    [argument]
  46  #define this_cpu_add_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)    [argument]
  47  #define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)    [argument]
  48  #define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)    [argument]
  49  #define this_cpu_and_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)    [argument]
  50  #define this_cpu_and_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)    [argument]
  51  #define this_cpu_or_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)    [argument]
  [all …]
/linux-6.12.1/arch/x86/include/asm/percpu.h
  153  #define __raw_cpu_read(size, qual, pcp) \    [argument]
  155  *(qual __my_cpu_type(pcp) *)__my_cpu_ptr(&(pcp)); \
  158  #define __raw_cpu_write(size, qual, pcp, val) \    [argument]
  160  *(qual __my_cpu_type(pcp) *)__my_cpu_ptr(&(pcp)) = (val); \
  163  #define __raw_cpu_read_const(pcp) __raw_cpu_read(, , pcp)    [argument]
  196  #define __raw_cpu_read_const(pcp) ({ BUILD_BUG(); (typeof(pcp))0; })    [argument]
  363  #define raw_cpu_cmpxchg64(pcp, oval, nval) percpu_cmpxchg64_op(8, , pcp, oval, nval)    [argument]
  364  #define this_cpu_cmpxchg64(pcp, oval, nval) percpu_cmpxchg64_op(8, volatile, pcp, oval, nval)    [argument]
  397  #define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) percpu_try_cmpxchg64_op(8, , pcp, ovalp, n…    [argument]
  398  #define this_cpu_try_cmpxchg64(pcp, ovalp, nval) percpu_try_cmpxchg64_op(8, volatile, pcp, ovalp, n…    [argument]
  [all …]
/linux-6.12.1/include/linux/percpu-defs.h
  408  #define raw_cpu_read(pcp) __pcpu_size_call_return(raw_cpu_read_, pcp)    [argument]
  409  #define raw_cpu_write(pcp, val) __pcpu_size_call(raw_cpu_write_, pcp, val)    [argument]
  410  #define raw_cpu_add(pcp, val) __pcpu_size_call(raw_cpu_add_, pcp, val)    [argument]
  411  #define raw_cpu_and(pcp, val) __pcpu_size_call(raw_cpu_and_, pcp, val)    [argument]
  412  #define raw_cpu_or(pcp, val) __pcpu_size_call(raw_cpu_or_, pcp, val)    [argument]
  413  #define raw_cpu_add_return(pcp, val) __pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)    [argument]
  414  #define raw_cpu_xchg(pcp, nval) __pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)    [argument]
  415  #define raw_cpu_cmpxchg(pcp, oval, nval) \    [argument]
  416  __pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
  417  #define raw_cpu_try_cmpxchg(pcp, ovalp, nval) \    [argument]
  [all …]
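Each raw_cpu_*() wrapper above dispatches on sizeof(pcp) to a per-size backend (raw_cpu_read_1/2/4/8 and so on), which an architecture header may override. A minimal sketch of that dispatch, written from the pattern visible above with a hypothetical macro name (the in-tree __pcpu_size_call_return() also verifies the pointer and triggers a link error for unsupported sizes), is:

/* Sketch of sizeof()-based dispatch to per-width implementations. */
#define my_size_call_return(stem, variable)				\
({									\
	typeof(variable) ret__;						\
	switch (sizeof(variable)) {					\
	case 1: ret__ = stem##1(variable); break;			\
	case 2: ret__ = stem##2(variable); break;			\
	case 4: ret__ = stem##4(variable); break;			\
	case 8: ret__ = stem##8(variable); break;			\
	default:							\
		bad_size_call();	/* placeholder: never-defined	\
					 * symbol, fails at link time	\
					 */				\
		break;							\
	}								\
	ret__;								\
})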
/linux-6.12.1/include/linux/gfp.h
  400  int decay_pcp_high(struct zone *zone, struct per_cpu_pages *pcp);
  401  void drain_zone_pages(struct zone *zone, struct per_cpu_pages *pcp);
/linux-6.12.1/arch/loongarch/include/asm/percpu.h
  119  #define _protect_cmpxchg_local(pcp, o, n) \    [argument]
  121  typeof(*raw_cpu_ptr(&(pcp))) __ret; \
  123  __ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n); \
  128  #define _pcp_protect(operation, pcp, val) \    [argument]
  130  typeof(pcp) __retval; \
  132  __retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)), \
  133  (val), sizeof(pcp)); \
  138  #define _percpu_add(pcp, val) \    [argument]
  139  _pcp_protect(__percpu_add, pcp, val)
  141  #define _percpu_add_return(pcp, val) _percpu_add(pcp, val)    [argument]
  [all …]
/linux-6.12.1/arch/arm64/include/asm/percpu.h
  149  #define _pcp_protect(op, pcp, ...) \    [argument]
  152  op(raw_cpu_ptr(&(pcp)), __VA_ARGS__); \
  156  #define _pcp_protect_return(op, pcp, args...) \    [argument]
  158  typeof(pcp) __retval; \
  160  __retval = (typeof(pcp))op(raw_cpu_ptr(&(pcp)), ##args); \
  165  #define this_cpu_read_1(pcp) \    [argument]
  166  _pcp_protect_return(__percpu_read_8, pcp)
  167  #define this_cpu_read_2(pcp) \    [argument]
  168  _pcp_protect_return(__percpu_read_16, pcp)
  169  #define this_cpu_read_4(pcp) \    [argument]
  [all …]
/linux-6.12.1/Documentation/translations/zh_CN/core-api/this_cpu_ops.rst
  46  this_cpu_read(pcp)
  47  this_cpu_write(pcp, val)
  48  this_cpu_add(pcp, val)
  49  this_cpu_and(pcp, val)
  50  this_cpu_or(pcp, val)
  51  this_cpu_add_return(pcp, val)
  52  this_cpu_xchg(pcp, nval)
  53  this_cpu_cmpxchg(pcp, oval, nval)
  55  this_cpu_sub(pcp, val)
  56  this_cpu_inc(pcp)
  [all …]
/linux-6.12.1/mm/page_alloc.c
  1176  struct per_cpu_pages *pcp,    [in free_pcppages_bulk(), argument]
  1187  count = min(pcp->count, count);    [in free_pcppages_bulk()]
  1202  list = &pcp->lists[pindex];    [in free_pcppages_bulk()]
  1218  pcp->count -= nr_pages;    [in free_pcppages_bulk()]
  2335  int decay_pcp_high(struct zone *zone, struct per_cpu_pages *pcp)    [in decay_pcp_high(), argument]
  2340  high_min = READ_ONCE(pcp->high_min);    [in decay_pcp_high()]
  2341  batch = READ_ONCE(pcp->batch);    [in decay_pcp_high()]
  2347  if (pcp->high > high_min) {    [in decay_pcp_high()]
  2348  pcp->high = max3(pcp->count - (batch << CONFIG_PCP_BATCH_SCALE_MAX),    [in decay_pcp_high()]
  2349  pcp->high - (pcp->high >> 3), high_min);    [in decay_pcp_high()]
  [all …]
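The decay_pcp_high() lines show how the per-CPU page list's high watermark is lowered: the new high is the largest of (count minus one maximally scaled batch, so at most batch << scale pages get freed as a result), (high reduced by 1/8, bounding the decay rate), and high_min (the floor). A standalone sketch of that arithmetic with a locally defined max3() helper and made-up sample values:

#include <stdio.h>

#define BATCH_SCALE_MAX 5	/* stand-in for CONFIG_PCP_BATCH_SCALE_MAX */

static long max3(long a, long b, long c)
{
	long m = a > b ? a : b;

	return m > c ? m : c;
}

int main(void)
{
	long high = 1024, high_min = 64, count = 300, batch = 63;

	if (high > high_min) {
		/* Same shape as the decay_pcp_high() lines above. */
		high = max3(count - (batch << BATCH_SCALE_MAX),
			    high - (high >> 3), high_min);
	}
	printf("new high = %ld\n", high);	/* 1024 - 128 = 896 here */
	return 0;
}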
/linux-6.12.1/mm/vmstat.c
  345  struct per_cpu_zonestat __percpu *pcp = zone->per_cpu_zonestats;    [in __mod_zone_page_state(), local]
  346  s8 __percpu *p = pcp->vm_stat_diff + item;    [in __mod_zone_page_state()]
  361  t = __this_cpu_read(pcp->stat_threshold);    [in __mod_zone_page_state()]
  376  struct per_cpu_nodestat __percpu *pcp = pgdat->per_cpu_nodestats;    [in __mod_node_page_state(), local]
  377  s8 __percpu *p = pcp->vm_node_stat_diff + item;    [in __mod_node_page_state()]
  397  t = __this_cpu_read(pcp->stat_threshold);    [in __mod_node_page_state()]
  434  struct per_cpu_zonestat __percpu *pcp = zone->per_cpu_zonestats;    [in __inc_zone_state(), local]
  435  s8 __percpu *p = pcp->vm_stat_diff + item;    [in __inc_zone_state()]
  442  t = __this_cpu_read(pcp->stat_threshold);    [in __inc_zone_state()]
  455  struct per_cpu_nodestat __percpu *pcp = pgdat->per_cpu_nodestats;    [in __inc_node_state(), local]
  [all …]
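These vmstat helpers share one batching pattern: each CPU accumulates a small signed delta (vm_stat_diff / vm_node_stat_diff) and only folds it into the shared zone or node counter once it exceeds stat_threshold. A userspace-style sketch of that pattern, with simplified types and a plain struct instead of real per-CPU storage:

#include <stdlib.h>

/* Simplified stand-ins: in the kernel the diff is an s8 inside
 * per_cpu_zonestat/per_cpu_nodestat and the fold target is a shared
 * atomic counter.
 */
struct stat_state {
	long global;		/* shared counter */
	signed char diff;	/* this CPU's pending delta */
	signed char threshold;	/* stat_threshold */
};

static void mod_state(struct stat_state *s, long delta)
{
	long x = s->diff + delta;

	/* Fold into the shared counter only when the local batch grows
	 * past the threshold; otherwise keep the update CPU-local.
	 */
	if (labs(x) > s->threshold) {
		s->global += x;
		x = 0;
	}
	s->diff = x;
}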
/linux-6.12.1/Documentation/core-api/this_cpu_ops.rst
  48  this_cpu_read(pcp)
  49  this_cpu_write(pcp, val)
  50  this_cpu_add(pcp, val)
  51  this_cpu_and(pcp, val)
  52  this_cpu_or(pcp, val)
  53  this_cpu_add_return(pcp, val)
  54  this_cpu_xchg(pcp, nval)
  55  this_cpu_cmpxchg(pcp, oval, nval)
  56  this_cpu_sub(pcp, val)
  57  this_cpu_inc(pcp)
  [all …]
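The this_cpu operations listed in the document take the per-CPU variable itself (not a pointer) and are safe against preemption and interrupts for the single operation they perform. A minimal usage sketch with a hypothetical counter name:

#include <linux/percpu.h>
#include <linux/cpumask.h>

/* Hypothetical per-CPU counter, for illustration only. */
static DEFINE_PER_CPU(unsigned long, my_hits);

static void record_hit(void)
{
	/* One this_cpu op needs no explicit preempt_disable() around it. */
	this_cpu_inc(my_hits);
}

static unsigned long total_hits(void)
{
	unsigned long sum = 0;
	int cpu;

	/* Reading a total still means walking every CPU's copy. */
	for_each_possible_cpu(cpu)
		sum += per_cpu(my_hits, cpu);

	return sum;
}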
/linux-6.12.1/drivers/net/dsa/sja1105/sja1105_flower.c
  123  rule->key.tc.pcp = tc;    [in sja1105_setup_tc_policer()]
  187  key->tc.pcp, rate_bytes_per_sec,    [in sja1105_flower_policer()]
  205  u16 pcp = U16_MAX;    [in sja1105_flower_parse_key(), local]
  277  pcp = match.key->vlan_priority;    [in sja1105_flower_parse_key()]
  280  if (is_bcast_dmac && vid == U16_MAX && pcp == U16_MAX) {    [in sja1105_flower_parse_key()]
  284  if (dmac == U64_MAX && vid == U16_MAX && pcp != U16_MAX) {    [in sja1105_flower_parse_key()]
  286  key->tc.pcp = pcp;    [in sja1105_flower_parse_key()]
  289  if (dmac != U64_MAX && vid != U16_MAX && pcp != U16_MAX) {    [in sja1105_flower_parse_key()]
  293  key->vl.pcp = pcp;    [in sja1105_flower_parse_key()]
  484  int index = (port * SJA1105_NUM_TC) + rule->key.tc.pcp;    [in sja1105_cls_flower_del()]
/linux-6.12.1/drivers/net/dsa/sja1105/sja1105.h
  182  int pcp;    [member]
  190  u16 pcp;    [member]
/linux-6.12.1/lib/percpu_test.c
  5  #define CHECK(native, pcp, expected) \    [argument]
  11  WARN(__this_cpu_read(pcp) != (expected), \
  13  __this_cpu_read(pcp), __this_cpu_read(pcp), \
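The CHECK() macro (truncated above) compares both a plain native variable and a per-CPU variable against an expected value, warning on mismatch. A hypothetical fragment in the spirit of that test, applying the same update to a plain long and to a per-CPU long and then checking both:

#include <linux/percpu.h>
#include <linux/preempt.h>
#include <linux/bug.h>

/* Hypothetical per-CPU variable, for illustration only. */
static DEFINE_PER_CPU(long, test_var);

static void check_add_once(void)
{
	long native = 0;
	long expected = 0x7fffffff;

	preempt_disable();

	/* Mirror the same arithmetic on both copies. */
	native += 0x7fffffff;
	__this_cpu_add(test_var, 0x7fffffff);

	WARN(native != expected, "native %ld != expected %ld\n",
	     native, expected);
	WARN(__this_cpu_read(test_var) != expected,
	     "pcp %ld != expected %ld\n",
	     __this_cpu_read(test_var), expected);

	preempt_enable();
}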
/linux-6.12.1/net/dsa/tag_vsc73xx_8021q.c
  17  u8 pcp;    [in vsc73xx_xmit(), local]
  29  pcp = netdev_txq_to_tc(netdev, queue_mapping);    [in vsc73xx_xmit()]
  32  ((pcp << VLAN_PRIO_SHIFT) | tx_vid));    [in vsc73xx_xmit()]
/linux-6.12.1/net/dsa/tag_sja1105.c
  241  struct sk_buff *skb, u8 pcp)    [in sja1105_pvid_tag_control_pkt(), argument]
  262  return vlan_insert_tag(skb, xmit_tpid, (pcp << VLAN_PRIO_SHIFT) |    [in sja1105_pvid_tag_control_pkt()]
  271  u8 pcp = netdev_txq_to_tc(netdev, queue_mapping);    [in sja1105_xmit(), local]
  282  skb = sja1105_pvid_tag_control_pkt(dp, skb, pcp);    [in sja1105_xmit()]
  290  ((pcp << VLAN_PRIO_SHIFT) | tx_vid));    [in sja1105_xmit()]
  299  u8 pcp = netdev_txq_to_tc(netdev, queue_mapping);    [in sja1110_xmit(), local]
  314  ((pcp << VLAN_PRIO_SHIFT) | tx_vid));    [in sja1110_xmit()]
  316  skb = sja1105_pvid_tag_control_pkt(dp, skb, pcp);    [in sja1110_xmit()]
  333  *tx_trailer = cpu_to_be32(SJA1110_TX_TRAILER_PRIO(pcp) |    [in sja1110_xmit()]
/linux-6.12.1/net/dsa/tag_ocelot_8021q.c
  68  u8 pcp = netdev_txq_to_tc(netdev, queue_mapping);    [in ocelot_xmit(), local]
  76  ((pcp << VLAN_PRIO_SHIFT) | tx_vid));    [in ocelot_xmit()]
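The vsc73xx, sja1105, and ocelot taggers above all derive pcp from the TX queue's traffic class and place it in the priority field of the 802.1Q tag. In the 16-bit VLAN TCI the PCP occupies bits 15:13 (VLAN_PRIO_SHIFT is 13), DEI is bit 12, and the VID fills bits 11:0. A small standalone sketch of packing and unpacking a TCI, using local constants rather than the kernel's if_vlan.h helpers:

#include <stdint.h>
#include <stdio.h>

/* Local copies of the 802.1Q TCI layout used by the taggers above. */
#define PRIO_SHIFT	13		/* same value as VLAN_PRIO_SHIFT */
#define DEI_BIT		(1u << 12)
#define VID_MASK	0x0fff

static uint16_t build_tci(uint8_t pcp, int dei, uint16_t vid)
{
	return (uint16_t)(pcp << PRIO_SHIFT) |
	       (dei ? DEI_BIT : 0) |
	       (vid & VID_MASK);
}

int main(void)
{
	/* Same shape as (pcp << VLAN_PRIO_SHIFT) | tx_vid above. */
	uint16_t tci = build_tci(5, 0, 100);

	printf("tci=0x%04x pcp=%u vid=%u\n",
	       tci, tci >> PRIO_SHIFT, tci & VID_MASK);
	return 0;
}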
/linux-6.12.1/tools/testing/selftests/drivers/net/microchip/ksz9477_qos.sh
  390  local pcp
  456  pcp=0
  458  "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
  465  pcp=7
  467  "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
  474  "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
  478  pcp=0
  480  "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
/linux-6.12.1/drivers/net/ethernet/microchip/lan966x/lan966x_port.c
  402  u8 pcp, dp;    [in lan966x_port_qos_pcp_set(), local]
  410  pcp = *(pcp_itr + i);    [in lan966x_port_qos_pcp_set()]
  413  lan_rmw(ANA_PCP_DEI_CFG_QOS_PCP_DEI_VAL_SET(pcp) |    [in lan966x_port_qos_pcp_set()]
  471  u8 pcp, dei;    [in lan966x_port_qos_pcp_rewr_set(), local]
  487  pcp = qos->map[i];    [in lan966x_port_qos_pcp_rewr_set()]
  488  if (pcp > LAN966X_PORT_QOS_PCP_COUNT)    [in lan966x_port_qos_pcp_rewr_set()]
  494  REW_PCP_DEI_CFG_PCP_QOS_VAL_SET(pcp),    [in lan966x_port_qos_pcp_rewr_set()]
  540  lan966x_port_qos_pcp_set(port, &qos->pcp);    [in lan966x_port_qos_set()]
/linux-6.12.1/drivers/net/ethernet/microchip/lan966x/lan966x_dcb.c
  58  for (int i = 0; i < ARRAY_SIZE(qos.pcp.map); i++) {    [in lan966x_dcb_app_update()]
  61  qos.pcp.map[i] = dcb_getapp(dev, &app_itr);    [in lan966x_dcb_app_update()]
  98  qos.pcp.enable = true;    [in lan966x_dcb_app_update()]
/linux-6.12.1/drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c
  371  u8 pcp = OPA_VNIC_VLAN_PCP(vlan_tci);    [in opa_vnic_get_sc(), local]
  374  sc = info->vport.pcp_to_sc_mc[pcp];    [in opa_vnic_get_sc()]
  376  sc = info->vport.pcp_to_sc_uc[pcp];    [in opa_vnic_get_sc()]
  394  u8 pcp = skb_vlan_tag_get(skb) >> VLAN_PRIO_SHIFT;    [in opa_vnic_get_vl(), local]
  397  vl = info->vport.pcp_to_vl_mc[pcp];    [in opa_vnic_get_vl()]
  399  vl = info->vport.pcp_to_vl_uc[pcp];    [in opa_vnic_get_vl()]
/linux-6.12.1/drivers/net/ethernet/microchip/sparx5/sparx5_dcb.c
  150  pcp_map = &qos.pcp.map;    [in sparx5_dcb_app_update()]
  202  qos.pcp.qos_enable = true;    [in sparx5_dcb_app_update()]
  203  qos.pcp.dp_enable = qos.pcp.qos_enable;    [in sparx5_dcb_app_update()]
/linux-6.12.1/drivers/net/ethernet/microchip/sparx5/sparx5_port.c
  1158  sparx5_port_qos_pcp_set(port, &qos->pcp);    [in sparx5_port_qos_set()]
  1171  u8 pcp, dei;    [in sparx5_port_qos_pcp_rewr_set(), local]
  1187  pcp = qos->map.map[i];    [in sparx5_port_qos_pcp_rewr_set()]
  1188  if (pcp > SPARX5_PORT_QOS_PCP_COUNT)    [in sparx5_port_qos_pcp_rewr_set()]
  1201  spx5_rmw(REW_PCP_MAP_DE1_PCP_DE1_SET(pcp),    [in sparx5_port_qos_pcp_rewr_set()]
  1209  spx5_rmw(REW_PCP_MAP_DE0_PCP_DE0_SET(pcp),    [in sparx5_port_qos_pcp_rewr_set()]
  1227  u8 pcp, dp;    [in sparx5_port_qos_pcp_set(), local]
  1238  pcp = *(pcp_itr + i);    [in sparx5_port_qos_pcp_set()]
  1240  spx5_rmw(ANA_CL_PCP_DEI_MAP_CFG_PCP_DEI_QOS_VAL_SET(pcp) |    [in sparx5_port_qos_pcp_set()]
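The lan966x and sparx5 fragments above follow one pattern: the DCB app table is collapsed into an 8-entry PCP-to-priority map, each entry is range-checked, and the result is written to the ingress classification or egress rewrite registers. A hardware-free sketch of that pattern, with hypothetical names standing in for the drivers' lan_rmw()/spx5_rmw() register macros:

#include <stdint.h>
#include <stdio.h>

#define PCP_COUNT	8	/* one entry per 802.1p priority code point */

/* Hypothetical stand-in for the drivers' register write helpers. */
static void program_pcp_entry(int pcp, uint8_t prio)
{
	printf("map PCP %d -> priority %u\n", pcp, prio);
}

static void apply_pcp_map(const uint8_t map[PCP_COUNT])
{
	for (int i = 0; i < PCP_COUNT; i++) {
		/* Skip out-of-range priorities, in the spirit of the
		 * "pcp > *_PORT_QOS_PCP_COUNT" checks above.
		 */
		if (map[i] >= PCP_COUNT)
			continue;
		program_pcp_entry(i, map[i]);
	}
}

int main(void)
{
	/* Identity mapping except PCP 0, which is promoted to priority 1. */
	uint8_t map[PCP_COUNT] = { 1, 1, 2, 3, 4, 5, 6, 7 };

	apply_pcp_map(map);
	return 0;
}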
/linux-6.12.1/drivers/net/ethernet/microchip/sparx5/sparx5_port.h
  150  struct sparx5_port_qos_pcp pcp;    [member]
/linux-6.12.1/arch/sparc/kernel/pcic.c
  608  struct pcidev_cookie *pcp;    [in pcibios_fixup_bus(), local]
  631  pcp = pci_devcookie_alloc();    [in pcibios_fixup_bus()]
  632  pcp->pbm = &pcic->pbm;    [in pcibios_fixup_bus()]
  633  pcp->prom_node = of_find_node_by_phandle(node);    [in pcibios_fixup_bus()]
  634  dev->sysdata = pcp;    [in pcibios_fixup_bus()]