Lines Matching refs:ntb
152 if (!ntb_topo_is_b2b(ndev->ntb.topo)) in ndev_reset_unsafe_flags()
193 dev_dbg(&ndev->ntb.pdev->dev, "Peer db addr %llx\n", *db_addr); in ndev_db_addr()
198 dev_dbg(&ndev->ntb.pdev->dev, "Peer db size %llx\n", *db_size); in ndev_db_addr()
289 dev_dbg(&ndev->ntb.pdev->dev, "Peer spad addr %llx\n", in ndev_spad_addr()
331 dev_dbg(&ndev->ntb.pdev->dev, "vec %d vec_mask %llx\n", vec, vec_mask); in ndev_interrupt()
337 ntb_link_event(&ndev->ntb); in ndev_interrupt()
341 ntb_db_event(&ndev->ntb, vec); in ndev_interrupt()
350 dev_dbg(&nvec->ndev->ntb.pdev->dev, "irq: %d nvec->num: %d\n", in ndev_vec_isr()
360 return ndev_interrupt(ndev, irq - ndev->ntb.pdev->irq); in ndev_irq_isr()
370 pdev = ndev->ntb.pdev; in ndev_init_isr()
469 pdev = ndev->ntb.pdev; in ndev_deinit_isr()
503 pdev = ndev->ntb.pdev; in ndev_ntb_debugfs_read()
519 ntb_topo_string(ndev->ntb.topo)); in ndev_ntb_debugfs_read()
651 if (ntb_topo_is_b2b(ndev->ntb.topo)) { in ndev_ntb_debugfs_read()
762 if (pdev_is_gen1(ndev->ntb.pdev)) in ndev_debugfs_read()
764 else if (pdev_is_gen3(ndev->ntb.pdev)) in ndev_debugfs_read()
766 else if (pdev_is_gen4(ndev->ntb.pdev) || pdev_is_gen5(ndev->ntb.pdev)) in ndev_debugfs_read()
779 debugfs_create_dir(pci_name(ndev->ntb.pdev), in ndev_init_debugfs()
796 int intel_ntb_mw_count(struct ntb_dev *ntb, int pidx) in intel_ntb_mw_count() argument
801 return ntb_ndev(ntb)->mw_count; in intel_ntb_mw_count()
804 int intel_ntb_mw_get_align(struct ntb_dev *ntb, int pidx, int idx, in intel_ntb_mw_get_align() argument
809 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_mw_get_align()
823 bar_size = pci_resource_len(ndev->ntb.pdev, bar); in intel_ntb_mw_get_align()
831 *addr_align = pci_resource_len(ndev->ntb.pdev, bar); in intel_ntb_mw_get_align()
842 static int intel_ntb_mw_set_trans(struct ntb_dev *ntb, int pidx, int idx, in intel_ntb_mw_set_trans() argument
845 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_mw_set_trans()
862 bar_size = pci_resource_len(ndev->ntb.pdev, bar); in intel_ntb_mw_set_trans()
943 u64 intel_ntb_link_is_up(struct ntb_dev *ntb, enum ntb_speed *speed, in intel_ntb_link_is_up() argument
946 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_link_is_up()
965 static int intel_ntb_link_enable(struct ntb_dev *ntb, in intel_ntb_link_enable() argument
972 ndev = container_of(ntb, struct intel_ntb_dev, ntb); in intel_ntb_link_enable()
974 if (ndev->ntb.topo == NTB_TOPO_SEC) in intel_ntb_link_enable()
977 dev_dbg(&ntb->pdev->dev, in intel_ntb_link_enable()
981 dev_dbg(&ntb->pdev->dev, "ignoring max_speed %d\n", max_speed); in intel_ntb_link_enable()
983 dev_dbg(&ntb->pdev->dev, "ignoring max_width %d\n", max_width); in intel_ntb_link_enable()
996 int intel_ntb_link_disable(struct ntb_dev *ntb) in intel_ntb_link_disable() argument
1001 ndev = container_of(ntb, struct intel_ntb_dev, ntb); in intel_ntb_link_disable()
1003 if (ndev->ntb.topo == NTB_TOPO_SEC) in intel_ntb_link_disable()
1006 dev_dbg(&ntb->pdev->dev, "Disabling link\n"); in intel_ntb_link_disable()
1020 int intel_ntb_peer_mw_count(struct ntb_dev *ntb) in intel_ntb_peer_mw_count() argument
1023 return ntb_ndev(ntb)->mw_count; in intel_ntb_peer_mw_count()
1026 int intel_ntb_peer_mw_get_addr(struct ntb_dev *ntb, int idx, in intel_ntb_peer_mw_get_addr() argument
1029 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_peer_mw_get_addr()
1040 *base = pci_resource_start(ndev->ntb.pdev, bar) + in intel_ntb_peer_mw_get_addr()
1044 *size = pci_resource_len(ndev->ntb.pdev, bar) - in intel_ntb_peer_mw_get_addr()
1050 static int intel_ntb_db_is_unsafe(struct ntb_dev *ntb) in intel_ntb_db_is_unsafe() argument
1052 return ndev_ignore_unsafe(ntb_ndev(ntb), NTB_UNSAFE_DB); in intel_ntb_db_is_unsafe()
1055 u64 intel_ntb_db_valid_mask(struct ntb_dev *ntb) in intel_ntb_db_valid_mask() argument
1057 return ntb_ndev(ntb)->db_valid_mask; in intel_ntb_db_valid_mask()
1060 int intel_ntb_db_vector_count(struct ntb_dev *ntb) in intel_ntb_db_vector_count() argument
1064 ndev = container_of(ntb, struct intel_ntb_dev, ntb); in intel_ntb_db_vector_count()
1069 u64 intel_ntb_db_vector_mask(struct ntb_dev *ntb, int db_vector) in intel_ntb_db_vector_mask() argument
1071 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_db_vector_mask()
1079 static u64 intel_ntb_db_read(struct ntb_dev *ntb) in intel_ntb_db_read() argument
1081 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_db_read()
1088 static int intel_ntb_db_clear(struct ntb_dev *ntb, u64 db_bits) in intel_ntb_db_clear() argument
1090 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_db_clear()
1097 int intel_ntb_db_set_mask(struct ntb_dev *ntb, u64 db_bits) in intel_ntb_db_set_mask() argument
1099 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_db_set_mask()
1106 int intel_ntb_db_clear_mask(struct ntb_dev *ntb, u64 db_bits) in intel_ntb_db_clear_mask() argument
1108 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_db_clear_mask()
1115 static int intel_ntb_peer_db_addr(struct ntb_dev *ntb, phys_addr_t *db_addr, in intel_ntb_peer_db_addr() argument
1119 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_peer_db_addr()
1126 if (unlikely(db_bits & ~ntb_ndev(ntb)->db_valid_mask)) in intel_ntb_peer_db_addr()
1139 static int intel_ntb_peer_db_set(struct ntb_dev *ntb, u64 db_bits) in intel_ntb_peer_db_set() argument
1141 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_peer_db_set()
1148 int intel_ntb_spad_is_unsafe(struct ntb_dev *ntb) in intel_ntb_spad_is_unsafe() argument
1150 return ndev_ignore_unsafe(ntb_ndev(ntb), NTB_UNSAFE_SPAD); in intel_ntb_spad_is_unsafe()
1153 int intel_ntb_spad_count(struct ntb_dev *ntb) in intel_ntb_spad_count() argument
1157 ndev = container_of(ntb, struct intel_ntb_dev, ntb); in intel_ntb_spad_count()
1162 u32 intel_ntb_spad_read(struct ntb_dev *ntb, int idx) in intel_ntb_spad_read() argument
1164 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_spad_read()
1171 int intel_ntb_spad_write(struct ntb_dev *ntb, int idx, u32 val) in intel_ntb_spad_write() argument
1173 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_spad_write()
1180 int intel_ntb_peer_spad_addr(struct ntb_dev *ntb, int pidx, int sidx, in intel_ntb_peer_spad_addr() argument
1183 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_peer_spad_addr()
1189 u32 intel_ntb_peer_spad_read(struct ntb_dev *ntb, int pidx, int sidx) in intel_ntb_peer_spad_read() argument
1191 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_peer_spad_read()
1198 int intel_ntb_peer_spad_write(struct ntb_dev *ntb, int pidx, int sidx, in intel_ntb_peer_spad_write() argument
1201 struct intel_ntb_dev *ndev = ntb_ndev(ntb); in intel_ntb_peer_spad_write()
1227 rc = pci_read_config_word(ndev->ntb.pdev, in xeon_poll_link()
1242 if (ndev->ntb.topo == NTB_TOPO_SEC) in xeon_link_is_up()
1272 dev_dbg(&ndev->ntb.pdev->dev, "PPD %d split bar\n", ppd); in xeon_ppd_bar4_split()
1302 pdev = ndev->ntb.pdev; in xeon_setup_b2b_mw()
1316 bar_size = pci_resource_len(ndev->ntb.pdev, b2b_bar); in xeon_setup_b2b_mw()
1539 struct device *dev = &ndev->ntb.pdev->dev; in xeon_init_ntb()
1552 switch (ndev->ntb.topo) { in xeon_init_ntb()
1612 if (ndev->ntb.topo == NTB_TOPO_B2B_USD) { in xeon_init_ntb()
1649 pdev = ndev->ntb.pdev; in xeon_init_dev()
1724 ndev->ntb.topo = xeon_ppd_topo(ndev, ppd); in xeon_init_dev()
1726 ntb_topo_string(ndev->ntb.topo)); in xeon_init_dev()
1727 if (ndev->ntb.topo == NTB_TOPO_NONE) in xeon_init_dev()
1730 if (ndev->ntb.topo != NTB_TOPO_SEC) { in xeon_init_dev()
1804 struct pci_dev *pdev = ndev->ntb.pdev; in intel_ntb_deinit_pci()
1818 ndev->ntb.pdev = pdev; in ndev_init_struct()
1819 ndev->ntb.topo = NTB_TOPO_NONE; in ndev_init_struct()
1820 ndev->ntb.ops = &intel_ntb_ops; in ndev_init_struct()
1867 ndev->ntb.ops = &intel_ntb3_ops; in intel_ntb_pci_probe()
1876 ndev->ntb.ops = &intel_ntb4_ops; in intel_ntb_pci_probe()
1895 rc = ntb_register_device(&ndev->ntb); in intel_ntb_pci_probe()
1920 ntb_unregister_device(&ndev->ntb); in intel_ntb_pci_remove()
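
The intel_ntb_* callbacks listed above are not called directly by clients: ndev_init_struct() points ndev->ntb.ops at &intel_ntb_ops, and intel_ntb_pci_probe() substitutes &intel_ntb3_ops or &intel_ntb4_ops on the newer generations. Below is an abridged sketch of that wiring, using the ntb_dev_ops member names from include/linux/ntb.h; it is illustrative only (hence the _sketch suffix) and may not match the in-tree intel_ntb_ops member-for-member.

/*
 * Abridged sketch: how the gen1 callbacks above plug into ntb_dev_ops.
 * Not the verbatim in-tree table; member names follow include/linux/ntb.h.
 */
static const struct ntb_dev_ops intel_ntb_ops_sketch = {
	.mw_count		= intel_ntb_mw_count,
	.mw_get_align		= intel_ntb_mw_get_align,
	.mw_set_trans		= intel_ntb_mw_set_trans,
	.peer_mw_count		= intel_ntb_peer_mw_count,
	.peer_mw_get_addr	= intel_ntb_peer_mw_get_addr,
	.link_is_up		= intel_ntb_link_is_up,
	.link_enable		= intel_ntb_link_enable,
	.link_disable		= intel_ntb_link_disable,
	.db_is_unsafe		= intel_ntb_db_is_unsafe,
	.db_valid_mask		= intel_ntb_db_valid_mask,
	.db_vector_count	= intel_ntb_db_vector_count,
	.db_vector_mask		= intel_ntb_db_vector_mask,
	.db_read		= intel_ntb_db_read,
	.db_clear		= intel_ntb_db_clear,
	.db_set_mask		= intel_ntb_db_set_mask,
	.db_clear_mask		= intel_ntb_db_clear_mask,
	.peer_db_addr		= intel_ntb_peer_db_addr,
	.peer_db_set		= intel_ntb_peer_db_set,
	.spad_is_unsafe		= intel_ntb_spad_is_unsafe,
	.spad_count		= intel_ntb_spad_count,
	.spad_read		= intel_ntb_spad_read,
	.spad_write		= intel_ntb_spad_write,
	.peer_spad_addr		= intel_ntb_peer_spad_addr,
	.peer_spad_read		= intel_ntb_peer_spad_read,
	.peer_spad_write	= intel_ntb_peer_spad_write,
};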
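
On the consumer side these ops are reached through the inline wrappers in include/linux/ntb.h, and the ntb_link_event()/ntb_db_event() calls made from ndev_interrupt() are delivered to whatever ntb_ctx_ops a client registered with ntb_set_ctx(). A minimal, hypothetical client sketch (the example_* names are invented for illustration) showing both directions:

#include <linux/device.h>
#include <linux/ntb.h>

/* Doorbell vector fired: read and acknowledge the pending bits. */
static void example_db_event(void *ctx, int vec)
{
	struct ntb_dev *ntb = ctx;
	u64 db_bits = ntb_db_read(ntb);		/* -> intel_ntb_db_read() */

	ntb_db_clear(ntb, db_bits);		/* -> intel_ntb_db_clear() */
}

/* Link change reported by the hardware driver via ntb_link_event(). */
static void example_link_event(void *ctx)
{
	struct ntb_dev *ntb = ctx;
	enum ntb_speed speed;
	enum ntb_width width;

	if (ntb_link_is_up(ntb, &speed, &width))
		dev_info(&ntb->dev, "NTB link up\n");
}

static const struct ntb_ctx_ops example_ctx_ops = {
	.link_event	= example_link_event,
	.db_event	= example_db_event,
};

static int example_probe(struct ntb_client *client, struct ntb_dev *ntb)
{
	int rc;

	rc = ntb_set_ctx(ntb, ntb, &example_ctx_ops);
	if (rc)
		return rc;

	/* Intel hardware ignores the speed/width hints (see link_enable). */
	ntb_link_enable(ntb, NTB_SPEED_AUTO, NTB_WIDTH_AUTO);

	/* Post a value for the peer and ring its doorbell 0. */
	ntb_spad_write(ntb, 0, 0x1234);		/* -> intel_ntb_spad_write() */
	ntb_peer_db_set(ntb, BIT_ULL(0));	/* -> intel_ntb_peer_db_set() */

	return 0;
}

In-tree users such as ntb_transport follow the same pattern, additionally sizing and programming the memory windows with ntb_mw_get_align()/ntb_mw_set_trans() before moving data.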