Lines matching full:binding (net/core/devmem.h)

From the definition of struct net_devmem_dmabuf_binding:

	/* The user holds a ref (via the netlink API) for as long as they want
	 * the binding to remain alive. Each page pool using this binding holds
	 * a ref to keep the binding alive. Each allocated net_iov holds a
	 * ref.
	 *
	 * The binding undoes itself and unmaps the underlying dmabuf once all
	 * those refs are dropped and the binding is no longer desired or in
	 * use.
	 */
	refcount_t ref;
	/* rxqs this binding is active on. */
	struct xarray bound_rxqs;

	/* ID of this binding. Globally unique to all bindings currently
	 * active.
	 */
	u32 id;
};
From struct dmabuf_genpool_chunk_owner, the per-chunk owner that
net_iov_owner() (used below) resolves to:

	struct net_devmem_dmabuf_binding *binding;
Declarations (CONFIG_NET_DEVMEM=y):

void __net_devmem_dmabuf_binding_free(struct net_devmem_dmabuf_binding *binding);
void net_devmem_unbind_dmabuf(struct net_devmem_dmabuf_binding *binding);
int net_devmem_bind_dmabuf_to_queue(struct net_device *dev, u32 rxq_idx,
				    struct net_devmem_dmabuf_binding *binding,
				    struct netlink_ext_ack *extack);
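A rough sketch of how these entry points pair up. The helper
demo_enable_devmem() is hypothetical, and the binding is assumed to have
been created elsewhere (the netlink bind path that the refcount comment
above refers to):

/* Hypothetical helper: bind an existing binding to one rx queue and
 * tear it down on failure.
 */
static int demo_enable_devmem(struct net_device *dev, u32 rxq_idx,
			      struct net_devmem_dmabuf_binding *binding,
			      struct netlink_ext_ack *extack)
{
	int err;

	err = net_devmem_bind_dmabuf_to_queue(dev, rxq_idx, binding, extack);
	if (err)
		/* Undo the binding; the dmabuf is unmapped once all
		 * outstanding refs are dropped.
		 */
		net_devmem_unbind_dmabuf(binding);

	return err;
}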
Inline accessors from a net_iov back to its binding:

static inline struct net_devmem_dmabuf_binding *
net_iov_binding(const struct net_iov *niov)
{
	return net_iov_owner(niov)->binding;
}

static inline u32 net_iov_binding_id(const struct net_iov *niov)
{
	return net_iov_owner(niov)->binding->id;
}
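A minimal usage sketch of the two accessors; demo_log_niov() and its
pr_debug() message are illustrative, not part of the header:

/* Hypothetical: walk niov -> chunk owner -> binding and print the
 * binding's globally unique id.
 */
static void demo_log_niov(const struct net_iov *niov)
{
	pr_debug("niov %p -> binding %u\n", niov, net_iov_binding_id(niov));
}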
static inline void
net_devmem_dmabuf_binding_get(struct net_devmem_dmabuf_binding *binding)
{
	refcount_inc(&binding->ref);
}

static inline void
net_devmem_dmabuf_binding_put(struct net_devmem_dmabuf_binding *binding)
{
	if (!refcount_dec_and_test(&binding->ref))
		return;

	__net_devmem_dmabuf_binding_free(binding);
}
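The pair follows the usual refcount_t pattern: any object that caches a
binding pointer takes a ref, and the final put frees the binding. A
hedged sketch with an invented demo_pool type (the real per-page-pool
plumbing lives elsewhere):

/* Hypothetical holder of a long-lived binding pointer. */
struct demo_pool {
	struct net_devmem_dmabuf_binding *binding;
};

static void demo_pool_attach(struct demo_pool *pool,
			     struct net_devmem_dmabuf_binding *binding)
{
	/* The pool's ref: keeps the binding alive while the pool uses it. */
	net_devmem_dmabuf_binding_get(binding);
	pool->binding = binding;
}

static void demo_pool_detach(struct demo_pool *pool)
{
	/* The last put ends up calling __net_devmem_dmabuf_binding_free(). */
	net_devmem_dmabuf_binding_put(pool->binding);
	pool->binding = NULL;
}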
struct net_iov *
net_devmem_alloc_dmabuf(struct net_devmem_dmabuf_binding *binding);
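Per the refcount comment at the top, every net_iov handed out by the
allocator pins the binding with its own ref. A sketch assuming a NULL
return on exhaustion (matching the stub below); demo_fill_rx_slot() and
the mention of a matching free routine are assumptions, not shown in the
matches:

/* Hypothetical wrapper around the devmem allocator. */
static struct net_iov *demo_fill_rx_slot(struct net_devmem_dmabuf_binding *binding)
{
	struct net_iov *niov;

	niov = net_devmem_alloc_dmabuf(binding);
	if (!niov)
		return NULL;	/* chunk pool exhausted, or devmem compiled out */

	/* niov now holds its own ref on the binding; the corresponding
	 * free path drops it.
	 */
	return niov;
}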
Stubs (CONFIG_NET_DEVMEM=n):

static inline void __net_devmem_dmabuf_binding_free(struct net_devmem_dmabuf_binding *binding) {}
static inline void net_devmem_unbind_dmabuf(struct net_devmem_dmabuf_binding *binding) {}
static inline int net_devmem_bind_dmabuf_to_queue(struct net_device *dev, u32 rxq_idx,
						  struct net_devmem_dmabuf_binding *binding,
						  struct netlink_ext_ack *extack)
{
	return -EOPNOTSUPP;
}
static inline struct net_iov *net_devmem_alloc_dmabuf(struct net_devmem_dmabuf_binding *binding)
{
	return NULL;
}
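The stubs keep call sites free of #ifdef CONFIG_NET_DEVMEM: bind
attempts report -EOPNOTSUPP and allocations yield NULL when the feature
is compiled out. A hypothetical ifdef-free caller:

/* Hypothetical: works unchanged whether CONFIG_NET_DEVMEM is y or n. */
static struct net_iov *demo_bind_and_alloc(struct net_device *dev, u32 rxq_idx,
					   struct net_devmem_dmabuf_binding *binding,
					   struct netlink_ext_ack *extack)
{
	if (net_devmem_bind_dmabuf_to_queue(dev, rxq_idx, binding, extack))
		return NULL;

	return net_devmem_alloc_dmabuf(binding);
}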