
Searched refs:smp_wmb (Results 1 – 25 of 400) sorted by relevance


/linux-6.12.1/security/selinux/status.c
     88  smp_wmb(); in selinux_status_update_setenforce()
     92  smp_wmb(); in selinux_status_update_setenforce()
    113  smp_wmb(); in selinux_status_update_policyload()
    118  smp_wmb(); in selinux_status_update_policyload()
/linux-6.12.1/include/asm-generic/barrier.h
    106  #ifndef smp_wmb
    107  #define smp_wmb() do { kcsan_wmb(); __smp_wmb(); } while (0)  macro
    120  #ifndef smp_wmb
    121  #define smp_wmb() barrier()  macro
/linux-6.12.1/arch/arm64/include/asm/vdso/compat_barrier.h
     29  #undef smp_wmb
     33  #define smp_wmb() aarch32_smp_wmb()  macro
/linux-6.12.1/tools/virtio/ringtest/main.h
    143  #define smp_wmb() barrier()  macro
    145  #define smp_wmb() asm volatile("dmb ishst" ::: "memory")  macro
    147  #define smp_wmb() smp_release()  macro
/linux-6.12.1/tools/memory-model/litmus-tests/S+fencewmbonceonce+poacquireonce.litmus
      6  * Can a smp_wmb(), instead of a release, and an acquire order a prior
     15  smp_wmb();
/linux-6.12.1/tools/memory-model/litmus-tests/MP+fencewmbonceonce+fencermbonceonce.litmus
      6  * This litmus test demonstrates that smp_wmb() and smp_rmb() provide
     16  smp_wmb();
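The first of these litmus tests asks whether an smp_wmb() on the writer, combined with an acquire load on the reader, can stand in for a release store when ordering a prior store against the reader's own later store. A minimal C sketch of that shape, using only the kernel primitives the test names (the variable names x and y and the stored values are illustrative; the in-tree test records the actual verdict):

    #include <linux/compiler.h>
    #include <asm/barrier.h>

    static int x, y;

    void p0(void)
    {
            WRITE_ONCE(x, 2);
            smp_wmb();                      /* store to x ordered before store to y */
            WRITE_ONCE(y, 1);
    }

    void p1(void)
    {
            int r0 = smp_load_acquire(&y);  /* acquire keeps the later store after this load */

            WRITE_ONCE(x, 1);
            /* The litmus test asks: can the run end with r0 == 1 and x == 2? */
            (void)r0;
    }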
/linux-6.12.1/tools/include/asm/barrier.h
     42  #ifndef smp_wmb
     43  # define smp_wmb() wmb()  macro
/linux-6.12.1/kernel/watchdog_buddy.c
     59  smp_wmb(); in watchdog_hardlockup_enable()
     83  smp_wmb(); in watchdog_hardlockup_disable()
/linux-6.12.1/arch/sparc/include/asm/vvar.h
     65  smp_wmb(); /* Makes sure that increment of seq is reflected */ in vvar_write_begin()
     70  smp_wmb(); /* Makes the value of seq current before we increment */ in vvar_write_end()
/linux-6.12.1/arch/openrisc/kernel/sync-timer.c
     56  smp_wmb(); in synchronise_count_master()
     77  smp_wmb(); in synchronise_count_master()
/linux-6.12.1/include/vdso/helpers.h
     40  smp_wmb(); in vdso_write_begin()
     45  smp_wmb(); in vdso_write_end()
/linux-6.12.1/include/linux/seqlock.h
    409  smp_wmb(); in do_raw_write_seqcount_begin()
    428  smp_wmb(); in do_raw_write_seqcount_end()
    552  smp_wmb(); in do_raw_write_seqcount_barrier()
    570  smp_wmb(); in do_write_seqcount_invalidate()
    721  smp_wmb(); /* prior stores before incrementing "sequence" */ in raw_write_seqcount_latch()
    723  smp_wmb(); /* increment "sequence" before following stores */ in raw_write_seqcount_latch()
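The seqlock.h hits are the write side of the classic sequence counter: the counter goes odd, smp_wmb() separates that from the data updates, and a second smp_wmb() separates the data updates from the counter going even again, so a reader that sees an even and unchanged counter knows its copies are consistent. A minimal hand-rolled sketch of that protocol, not the kernel's seqcount_t API (the data fields are illustrative):

    #include <linux/compiler.h>
    #include <asm/barrier.h>

    static unsigned int seq;        /* even: stable, odd: write in progress */
    static int data_a, data_b;

    void write_side(int a, int b)
    {
            WRITE_ONCE(seq, seq + 1);       /* make the count odd */
            smp_wmb();                      /* counter update before data stores */
            data_a = a;
            data_b = b;
            smp_wmb();                      /* data stores before counter update */
            WRITE_ONCE(seq, seq + 1);       /* back to even */
    }

    void read_side(int *a, int *b)
    {
            unsigned int start;

            do {
                    start = READ_ONCE(seq);
                    smp_rmb();              /* counter read before data reads */
                    *a = data_a;
                    *b = data_b;
                    smp_rmb();              /* data reads before counter re-check */
            } while ((start & 1) || READ_ONCE(seq) != start);  /* retry if a write raced */
    }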
/linux-6.12.1/arch/x86/include/asm/pgtable-3level.h
     39  smp_wmb(); in native_set_pte()
     70  smp_wmb(); in native_pte_clear()
     77  smp_wmb(); in native_pmd_clear()
/linux-6.12.1/arch/arm/mach-socfpga/platsmp.c
     36  smp_wmb(); in socfpga_boot_secondary()
     59  smp_wmb(); in socfpga_a10_boot_secondary()
/linux-6.12.1/arch/mips/kernel/rtlx.c
    276  smp_wmb(); in rtlx_read()
    278  smp_wmb(); in rtlx_read()
    318  smp_wmb(); in rtlx_write()
    320  smp_wmb(); in rtlx_write()
/linux-6.12.1/arch/powerpc/platforms/pseries/dtl.c
     87  smp_wmb(); in consume_dtle()
    100  smp_wmb(); in dtl_start()
    116  smp_wmb(); in dtl_stop()
    157  smp_wmb(); in dtl_start()
/linux-6.12.1/tools/testing/selftests/kvm/rseq_test.c
     98  smp_wmb(); in migration_worker()
    102  smp_wmb(); in migration_worker()
/linux-6.12.1/net/llc/llc_input.c
     45  smp_wmb(); /* ensure initialisation is complete before it's called */ in llc_add_pack()
     61  smp_wmb(); in llc_set_station_handler()
/linux-6.12.1/arch/powerpc/sysdev/fsl_lbc.c
    247  smp_wmb(); in fsl_lbc_ctrl_irq()
    253  smp_wmb(); in fsl_lbc_ctrl_irq()
    257  smp_wmb(); in fsl_lbc_ctrl_irq()
/linux-6.12.1/Documentation/translations/zh_CN/core-api/local_ops.rst
    126  ...CPUs should use explicit ``smp_wmb()`` and ``smp_rmb()`` memory barriers, respectively. If you use...
    128  ...between the buffer write and the counter increment there should be an ``smp_wmb()``, and between the counter read and the buffer read...
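The local_ops.rst fragments above describe a local_t byte counter that publishes how much data sits in a buffer: the writing CPU needs an smp_wmb() between the buffer write and the counter increment, and a reading CPU needs an smp_rmb() between the counter read and the buffer read. A rough sketch of that pairing (buffer name, size, and helper names are illustrative, not the documentation's own example, and bounds checking is omitted):

    #include <linux/string.h>
    #include <linux/types.h>
    #include <linux/compiler.h>
    #include <asm/barrier.h>
    #include <asm/local.h>

    #define LOG_SIZE 4096                   /* illustrative buffer size */

    static char log_buf[LOG_SIZE];
    static local_t log_bytes;               /* incremented only by the owning CPU */

    /* Runs on the CPU that owns log_bytes. */
    void log_append(const char *src, size_t len)
    {
            memcpy(log_buf + local_read(&log_bytes), src, len);
            smp_wmb();      /* buffer contents visible before the counter bump */
            local_add(len, &log_bytes);
    }

    /* May run on any CPU. */
    size_t log_snapshot(char *dst)
    {
            size_t n = local_read(&log_bytes);

            smp_rmb();      /* counter read ordered before the buffer reads */
            memcpy(dst, log_buf, n);
            return n;
    }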
/linux-6.12.1/net/wireless/wext-spy.c
     49  smp_wmb(); in iw_handler_set_spy()
     65  smp_wmb(); in iw_handler_set_spy()
/linux-6.12.1/tools/arch/x86/include/asm/barrier.h
     28  #define smp_wmb() barrier()  macro
/linux-6.12.1/arch/arm/mach-versatile/platsmp.c
     39  smp_wmb(); in versatile_write_cpu_release()
/linux-6.12.1/tools/arch/riscv/include/asm/barrier.h
     24  #define smp_wmb() RISCV_FENCE(w, w)  macro
/linux-6.12.1/tools/memory-model/Documentation/recipes.txt
    290  It is usually better to use smp_store_release() instead of smp_wmb()
    292  smp_wmb() and smp_rmb() APIs are still heavily used, so it is important
    299  smp_wmb();
    310  The smp_wmb() macro orders prior stores against later stores, and the
    319  smp_wmb();
    337  * smp_wmb() (B) smp_mb() (D)
    341  The B/C pairing is an example of the MP pattern using smp_wmb() on the
    344  Of course, given that smp_mb() is strictly stronger than either smp_wmb()
    346  smp_wmb() would also work with smp_mb() replacing either or both of the
    392  * smp_wmb() (B) smp_mb() (D)
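Condensed from the recipes.txt lines above: smp_wmb() on the producer pairs with smp_rmb() (or a stronger barrier) on the consumer to make the message-passing pattern work, although smp_store_release()/smp_load_acquire() is usually the better modern choice. A minimal sketch of that pairing (the names msg and flag and the value 42 are illustrative):

    #include <linux/compiler.h>
    #include <asm/barrier.h>

    static int msg;
    static int flag;

    /* Write side: publish msg, then raise the flag. */
    void mp_writer(void)
    {
            WRITE_ONCE(msg, 42);
            smp_wmb();              /* (B) order the msg store before the flag store */
            WRITE_ONCE(flag, 1);
    }

    /* Read side: if the flag is seen set, the msg store must be visible too. */
    int mp_reader(void)
    {
            if (READ_ONCE(flag)) {
                    smp_rmb();      /* (C) order the flag read before the msg read */
                    return READ_ONCE(msg);
            }
            return -1;              /* flag not yet set */
    }

As lines 344 and 346 of the recipe note, smp_mb() is strictly stronger, so it can replace either or both of these barriers without weakening the guarantee.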
