/linux-6.12.1/security/selinux/status.c
     88  smp_wmb();    in selinux_status_update_setenforce()
     92  smp_wmb();    in selinux_status_update_setenforce()
    113  smp_wmb();    in selinux_status_update_policyload()
    118  smp_wmb();    in selinux_status_update_policyload()
/linux-6.12.1/include/asm-generic/barrier.h
    106  #ifndef smp_wmb
    107  #define smp_wmb() do { kcsan_wmb(); __smp_wmb(); } while (0)    macro
    120  #ifndef smp_wmb
    121  #define smp_wmb() barrier()    macro
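The two definitions above are the generic fallbacks: under CONFIG_SMP, smp_wmb() wraps the architecture's __smp_wmb() together with a KCSAN annotation, while uniprocessor builds reduce it to a plain compiler barrier(). The fragment below is a condensed sketch of that selection logic, not the verbatim header; the surrounding arch-override machinery is abbreviated.

    /*
     * Condensed sketch of the asm-generic/barrier.h fallback chain.
     * An architecture supplies __smp_wmb() when it needs a real write
     * fence; the generic header layers KCSAN instrumentation on top,
     * or degrades to a compiler-only barrier on !SMP builds.
     */
    #ifdef CONFIG_SMP
    #ifndef smp_wmb                         /* arch may define its own */
    #define smp_wmb()  do { kcsan_wmb(); __smp_wmb(); } while (0)
    #endif
    #else  /* !CONFIG_SMP */
    #ifndef smp_wmb
    #define smp_wmb()  barrier()            /* only compiler reordering matters */
    #endif
    #endif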
/linux-6.12.1/arch/arm64/include/asm/vdso/compat_barrier.h
     29  #undef smp_wmb
     33  #define smp_wmb() aarch32_smp_wmb()    macro
/linux-6.12.1/tools/virtio/ringtest/main.h
    143  #define smp_wmb() barrier()    macro
    145  #define smp_wmb() asm volatile("dmb ishst" ::: "memory")    macro
    147  #define smp_wmb() smp_release()    macro
/linux-6.12.1/tools/memory-model/litmus-tests/S+fencewmbonceonce+poacquireonce.litmus
      6  * Can a smp_wmb(), instead of a release, and an acquire order a prior
     15  smp_wmb();
/linux-6.12.1/tools/memory-model/litmus-tests/MP+fencewmbonceonce+fencermbonceonce.litmus
      6  * This litmus test demonstrates that smp_wmb() and smp_rmb() provide
     16  smp_wmb();
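As the file names indicate, both litmus tests use the same writer-side idiom quoted above: a plain store, smp_wmb(), then a second store; the MP test pairs it with smp_rmb() between two reads on the other CPU, the S test with smp_load_acquire(). A plain-C sketch of the MP shape follows, using kernel primitives; the variable names are illustrative and not taken from the tests.

    /* Illustrative shared variables. */
    static int payload;
    static int flag;

    /* Writer: smp_wmb() orders the payload store before the flag store. */
    static void mp_writer(void)
    {
            WRITE_ONCE(payload, 42);
            smp_wmb();              /* pairs with smp_rmb() in mp_reader() */
            WRITE_ONCE(flag, 1);
    }

    /* Reader: smp_rmb() orders the flag load before the payload load,
     * so a reader that sees flag == 1 also sees payload == 42. */
    static void mp_reader(void)
    {
            if (READ_ONCE(flag)) {
                    smp_rmb();      /* pairs with smp_wmb() in mp_writer() */
                    WARN_ON(READ_ONCE(payload) != 42);
            }
    }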
/linux-6.12.1/tools/include/asm/barrier.h
     42  #ifndef smp_wmb
     43  # define smp_wmb() wmb()    macro
/linux-6.12.1/kernel/watchdog_buddy.c
     59  smp_wmb();    in watchdog_hardlockup_enable()
     83  smp_wmb();    in watchdog_hardlockup_disable()
/linux-6.12.1/arch/sparc/include/asm/vvar.h
     65  smp_wmb(); /* Makes sure that increment of seq is reflected */    in vvar_write_begin()
     70  smp_wmb(); /* Makes the value of seq current before we increment */    in vvar_write_end()
/linux-6.12.1/arch/openrisc/kernel/sync-timer.c
     56  smp_wmb();    in synchronise_count_master()
     77  smp_wmb();    in synchronise_count_master()
/linux-6.12.1/include/vdso/helpers.h
     40  smp_wmb();    in vdso_write_begin()
     45  smp_wmb();    in vdso_write_end()
/linux-6.12.1/include/linux/seqlock.h
    409  smp_wmb();    in do_raw_write_seqcount_begin()
    428  smp_wmb();    in do_raw_write_seqcount_end()
    552  smp_wmb();    in do_raw_write_seqcount_barrier()
    570  smp_wmb();    in do_write_seqcount_invalidate()
    721  smp_wmb(); /* prior stores before incrementing "sequence" */    in raw_write_seqcount_latch()
    723  smp_wmb(); /* increment "sequence" before following stores */    in raw_write_seqcount_latch()
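The seqlock.h hits above are the write side of the sequence-counter protocol: the counter is bumped to an odd value, an smp_wmb() publishes that before the protected data is modified, and a second smp_wmb() before the closing increment publishes the data before the counter turns even again; readers retry when they see an odd or changed counter. A stripped-down sketch of that protocol is shown below; it is not the kernel's seqcount_t implementation, and the data fields are invented for illustration.

    static unsigned int seq;        /* even: idle, odd: writer active */
    static int data_a, data_b;      /* illustrative protected data */

    static void write_side(int a, int b)
    {
            seq++;                  /* becomes odd */
            smp_wmb();              /* order seq store before data stores */
            data_a = a;
            data_b = b;
            smp_wmb();              /* order data stores before seq store */
            seq++;                  /* becomes even again */
    }

    static void read_side(int *a, int *b)
    {
            unsigned int start;

            do {
                    while ((start = READ_ONCE(seq)) & 1)
                            cpu_relax();    /* writer in progress */
                    smp_rmb();              /* order seq load before data loads */
                    *a = data_a;
                    *b = data_b;
                    smp_rmb();              /* order data loads before re-check */
            } while (READ_ONCE(seq) != start);
    }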
/linux-6.12.1/arch/x86/include/asm/pgtable-3level.h
     39  smp_wmb();    in native_set_pte()
     70  smp_wmb();    in native_pte_clear()
     77  smp_wmb();    in native_pmd_clear()
/linux-6.12.1/arch/arm/mach-socfpga/platsmp.c
     36  smp_wmb();    in socfpga_boot_secondary()
     59  smp_wmb();    in socfpga_a10_boot_secondary()
/linux-6.12.1/arch/mips/kernel/rtlx.c
    276  smp_wmb();    in rtlx_read()
    278  smp_wmb();    in rtlx_read()
    318  smp_wmb();    in rtlx_write()
    320  smp_wmb();    in rtlx_write()
/linux-6.12.1/arch/powerpc/platforms/pseries/dtl.c
     87  smp_wmb();    in consume_dtle()
    100  smp_wmb();    in dtl_start()
    116  smp_wmb();    in dtl_stop()
    157  smp_wmb();    in dtl_start()
/linux-6.12.1/tools/testing/selftests/kvm/rseq_test.c
     98  smp_wmb();    in migration_worker()
    102  smp_wmb();    in migration_worker()
/linux-6.12.1/net/llc/llc_input.c
     45  smp_wmb(); /* ensure initialisation is complete before it's called */    in llc_add_pack()
     61  smp_wmb();    in llc_set_station_handler()
/linux-6.12.1/arch/powerpc/sysdev/fsl_lbc.c
    247  smp_wmb();    in fsl_lbc_ctrl_irq()
    253  smp_wmb();    in fsl_lbc_ctrl_irq()
    257  smp_wmb();    in fsl_lbc_ctrl_irq()
/linux-6.12.1/Documentation/translations/zh_CN/core-api/local_ops.rst
    126  ... CPUs, use explicit ``smp_wmb()`` and ``smp_rmb()`` memory barriers respectively. If you use
    128  ... there should be an ``smp_wmb()`` between the buffer write and the counter increment, and between the counter read and the buffer read
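The translated fragments above describe the usual pairing for a per-CPU buffer published through a counter: an smp_wmb() between the buffer write and the counter increment on the producer, and an smp_rmb() between the counter read and the buffer read on the consumer. The sketch below follows that recipe with an invented sample-buffer structure; it is an illustration of the rule, not code from local_ops.rst.

    #define NSAMPLES 64

    /* Invented per-CPU sample buffer, used only to illustrate the rule. */
    struct sample_buf {
            unsigned long samples[NSAMPLES];
            unsigned int count;             /* number of valid entries */
    };

    /* Producer (owning CPU): make the entry visible before the count. */
    static void push_sample(struct sample_buf *b, unsigned long s)
    {
            if (b->count >= NSAMPLES)
                    return;                 /* full; real code would wrap or drop */
            b->samples[b->count] = s;
            smp_wmb();                      /* buffer write before counter increment */
            WRITE_ONCE(b->count, b->count + 1);
    }

    /* Consumer (another CPU): read the count before the entries it covers. */
    static unsigned long read_last_sample(struct sample_buf *b)
    {
            unsigned int n = READ_ONCE(b->count);

            if (!n)
                    return 0;
            smp_rmb();                      /* counter read before buffer read */
            return b->samples[n - 1];
    }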
/linux-6.12.1/net/wireless/wext-spy.c
     49  smp_wmb();    in iw_handler_set_spy()
     65  smp_wmb();    in iw_handler_set_spy()
/linux-6.12.1/tools/arch/x86/include/asm/barrier.h
     28  #define smp_wmb() barrier()    macro
/linux-6.12.1/arch/arm/mach-versatile/platsmp.c
     39  smp_wmb();    in versatile_write_cpu_release()
/linux-6.12.1/tools/arch/riscv/include/asm/barrier.h
     24  #define smp_wmb() RISCV_FENCE(w, w)    macro
/linux-6.12.1/tools/memory-model/Documentation/recipes.txt
    290  It is usually better to use smp_store_release() instead of smp_wmb()
    292  smp_wmb() and smp_rmb() APIs are still heavily used, so it is important
    299  smp_wmb();
    310  The smp_wmb() macro orders prior stores against later stores, and the
    319  smp_wmb();
    337  * smp_wmb() (B) smp_mb() (D)
    341  The B/C pairing is an example of the MP pattern using smp_wmb() on the
    344  Of course, given that smp_mb() is strictly stronger than either smp_wmb()
    346  smp_wmb() would also work with smp_mb() replacing either or both of the
    392  * smp_wmb() (B) smp_mb() (D)
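Line 290 quoted above recommends smp_store_release() over smp_wmb() where practical: a release/acquire pair carries the same producer-to-consumer ordering as the smp_wmb()/smp_rmb() pairing in the MP recipe, while attaching the ordering to the flag accesses themselves. Below is a short sketch of that preferred form; the variables are illustrative and not taken from recipes.txt.

    static int msg;
    static int ready;

    /* Writer: smp_store_release() orders the msg store before the ready
     * store, so no separate smp_wmb() is needed. */
    static void publish(void)
    {
            WRITE_ONCE(msg, 1);
            smp_store_release(&ready, 1);
    }

    /* Reader: smp_load_acquire() orders the ready load before the msg
     * load, replacing the smp_rmb() of the fence-based version. */
    static int consume(void)
    {
            if (!smp_load_acquire(&ready))
                    return -1;              /* nothing published yet */
            return READ_ONCE(msg);
    }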