Lines Matching full:cd

38  * @cd: genwqe device descriptor
44 int __genwqe_writeq(struct genwqe_dev *cd, u64 byte_offs, u64 val) in __genwqe_writeq() argument
46 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writeq()
48 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writeq()
51 if (cd->mmio == NULL) in __genwqe_writeq()
57 __raw_writeq((__force u64)cpu_to_be64(val), cd->mmio + byte_offs); in __genwqe_writeq()
63 * @cd: genwqe device descriptor
68 u64 __genwqe_readq(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readq() argument
70 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readq()
73 if ((cd->err_inject & GENWQE_INJECT_GFIR_FATAL) && in __genwqe_readq()
77 if ((cd->err_inject & GENWQE_INJECT_GFIR_INFO) && in __genwqe_readq()
81 if (cd->mmio == NULL) in __genwqe_readq()
84 return be64_to_cpu((__force __be64)__raw_readq(cd->mmio + byte_offs)); in __genwqe_readq()
89 * @cd: genwqe device descriptor
95 int __genwqe_writel(struct genwqe_dev *cd, u64 byte_offs, u32 val) in __genwqe_writel() argument
97 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writel()
99 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writel()
102 if (cd->mmio == NULL) in __genwqe_writel()
108 __raw_writel((__force u32)cpu_to_be32(val), cd->mmio + byte_offs); in __genwqe_writel()
114 * @cd: genwqe device descriptor
119 u32 __genwqe_readl(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readl() argument
121 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readl()
124 if (cd->mmio == NULL) in __genwqe_readl()
127 return be32_to_cpu((__force __be32)__raw_readl(cd->mmio + byte_offs)); in __genwqe_readl()
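
A minimal usage sketch for the four MMIO accessors above, assuming the driver's own headers and types are in scope; example_check_gfir() is not part of the driver, and IO_SLC_CFGREG_GFIR is borrowed from the FFDC fragment further down:

/* Hedged sketch: poll the global FIR through the 64-bit read accessor;
 * treating any non-zero value as an error is an illustrative policy. */
static int example_check_gfir(struct genwqe_dev *cd)
{
	u64 gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR);

	return (gfir != 0x0ull) ? -EIO : 0;
}
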
132 * @cd: genwqe device descriptor
138 int genwqe_read_app_id(struct genwqe_dev *cd, char *app_name, int len) in genwqe_read_app_id() argument
141 u32 app_id = (u32)cd->app_unitcfg; in genwqe_read_app_id()
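
genwqe_read_app_id() decodes the application id held in cd->app_unitcfg into a printable string; a hedged caller sketch, where example_log_app_id(), the 5-byte buffer and the log text are illustrative, not from the driver:

static void example_log_app_id(struct genwqe_dev *cd)
{
	char app_name[5];	/* 4 id characters + terminator (assumed) */

	genwqe_read_app_id(cd, app_name, sizeof(app_name));
	dev_info(&cd->pci_dev->dev, "application id: %s\n", app_name);
}
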
210 void *__genwqe_alloc_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_alloc_consistent() argument
216 return dma_alloc_coherent(&cd->pci_dev->dev, size, dma_handle, in __genwqe_alloc_consistent()
220 void __genwqe_free_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_free_consistent() argument
226 dma_free_coherent(&cd->pci_dev->dev, size, vaddr, dma_handle); in __genwqe_free_consistent()
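
A hedged pairing sketch for the coherent-memory wrappers above; the trailing parameters are inferred from the wrapped dma_alloc_coherent()/dma_free_coherent() calls, and the example_* helpers plus the 2-page size are illustrative:

static void *example_alloc_ctrl_buf(struct genwqe_dev *cd, dma_addr_t *dma_handle)
{
	return __genwqe_alloc_consistent(cd, 2 * PAGE_SIZE, dma_handle);
}

static void example_free_ctrl_buf(struct genwqe_dev *cd, void *vaddr,
				  dma_addr_t dma_handle)
{
	if (vaddr != NULL)	/* free with the same size/handle pair */
		__genwqe_free_consistent(cd, 2 * PAGE_SIZE, vaddr, dma_handle);
}
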
229 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, in genwqe_unmap_pages() argument
233 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_unmap_pages()
242 static int genwqe_map_pages(struct genwqe_dev *cd, in genwqe_map_pages() argument
247 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_map_pages()
271 genwqe_unmap_pages(cd, dma_list, num_pages); in genwqe_map_pages()
291 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
295 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_alloc_sync_sgl()
317 sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size, in genwqe_alloc_sync_sgl()
327 sgl->fpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
340 sgl->lpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
355 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, in genwqe_alloc_sync_sgl()
360 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_alloc_sync_sgl()
365 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_alloc_sync_sgl()
374 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_setup_sgl() argument
466 * @cd: genwqe device descriptor
473 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl) in genwqe_free_sync_sgl() argument
478 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_free_sync_sgl()
491 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_free_sync_sgl()
508 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, in genwqe_free_sync_sgl()
513 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_free_sync_sgl()
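
The three fragments above form an allocate/setup/tear-down lifecycle; a hedged sketch of chaining them. Only genwqe_free_sync_sgl()'s signature is shown in full, so the trailing arguments to the other two calls are assumptions:

static int example_sgl_lifecycle(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
				 void __user *uaddr, size_t size,
				 dma_addr_t *dma_list)
{
	int rc;

	rc = genwqe_alloc_sync_sgl(cd, sgl, uaddr, size, 1);	/* assumed trailing params */
	if (rc != 0)
		return rc;

	rc = genwqe_setup_sgl(cd, sgl, dma_list);		/* assumed trailing param */
	if (rc != 0)
		goto out_free;

	/* ... hand the SGL to the hardware and wait for completion ... */

 out_free:
	genwqe_free_sync_sgl(cd, sgl);	/* signature shown in full above */
	return rc;
}
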
524 * @cd: pointer to genwqe device
545 int genwqe_user_vmap(struct genwqe_dev *cd, struct dma_mapping *m, void *uaddr, in genwqe_user_vmap() argument
550 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vmap()
595 rc = genwqe_map_pages(cd, m->page_list, m->nr_pages, m->dma_list); in genwqe_user_vmap()
617 * @cd: pointer to genwqe device
620 int genwqe_user_vunmap(struct genwqe_dev *cd, struct dma_mapping *m) in genwqe_user_vunmap() argument
622 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vunmap()
631 genwqe_unmap_pages(cd, m->dma_list, m->nr_pages); in genwqe_user_vunmap()
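
A hedged pin/unpin sketch around genwqe_user_vmap()/genwqe_user_vunmap(); the trailing size argument to the vmap call is an assumption, the rest matches the fragments above:

static int example_pin_user_buffer(struct genwqe_dev *cd, struct dma_mapping *m,
				   void *uaddr, unsigned long size)
{
	int rc;

	rc = genwqe_user_vmap(cd, m, uaddr, size);	/* 'size' is an assumed parameter */
	if (rc != 0)
		return rc;

	/* ... m->dma_list now holds the per-page DMA addresses (see above) ... */

	return genwqe_user_vunmap(cd, m);
}
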
649 * @cd: pointer to the genwqe device descriptor
655 u8 genwqe_card_type(struct genwqe_dev *cd) in genwqe_card_type() argument
657 u64 card_type = cd->slu_unitcfg; in genwqe_card_type()
664 * @cd: pointer to the genwqe device descriptor
666 int genwqe_card_reset(struct genwqe_dev *cd) in genwqe_card_reset() argument
669 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_card_reset()
671 if (!genwqe_is_privileged(cd)) in genwqe_card_reset()
675 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, 0x1ull); in genwqe_card_reset()
677 __genwqe_readq(cd, IO_HSU_FIR_CLR); in genwqe_card_reset()
678 __genwqe_readq(cd, IO_APP_FIR_CLR); in genwqe_card_reset()
679 __genwqe_readq(cd, IO_SLU_FIR_CLR); in genwqe_card_reset()
689 softrst = __genwqe_readq(cd, IO_SLC_CFGREG_SOFTRESET) & 0x3cull; in genwqe_card_reset()
690 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, softrst | 0x2ull); in genwqe_card_reset()
695 if (genwqe_need_err_masking(cd)) { in genwqe_card_reset()
698 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_card_reset()
703 int genwqe_read_softreset(struct genwqe_dev *cd) in genwqe_read_softreset() argument
707 if (!genwqe_is_privileged(cd)) in genwqe_read_softreset()
710 bitstream = __genwqe_readq(cd, IO_SLU_BITSTREAM) & 0x1; in genwqe_read_softreset()
711 cd->softreset = (bitstream == 0) ? 0x8ull : 0xcull; in genwqe_read_softreset()
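
A hedged sketch of a PF-only recovery path combining genwqe_read_softreset() and genwqe_card_reset(); the ordering and error policy are illustrative, and example_recover_card() is not part of the driver:

static int example_recover_card(struct genwqe_dev *cd)
{
	int rc;

	if (!genwqe_is_privileged(cd))	/* same PF check as the helpers above */
		return -EPERM;

	rc = genwqe_read_softreset(cd);	/* refresh cd->softreset from IO_SLU_BITSTREAM */
	if (rc < 0)
		return rc;

	return genwqe_card_reset(cd);
}
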
717 * @cd: pointer to the device
721 int genwqe_set_interrupt_capability(struct genwqe_dev *cd, int count) in genwqe_set_interrupt_capability() argument
725 rc = pci_alloc_irq_vectors(cd->pci_dev, 1, count, PCI_IRQ_MSI); in genwqe_set_interrupt_capability()
733 * @cd: pointer to the device
735 void genwqe_reset_interrupt_capability(struct genwqe_dev *cd) in genwqe_reset_interrupt_capability() argument
737 pci_free_irq_vectors(cd->pci_dev); in genwqe_reset_interrupt_capability()
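
A hedged setup sketch for the MSI wrappers above; the handler step is a placeholder and example_setup_irqs() is illustrative:

static int example_setup_irqs(struct genwqe_dev *cd, int count)
{
	int rc = genwqe_set_interrupt_capability(cd, count);

	if (rc < 0)
		return rc;

	rc = 0;	/* ... request per-vector handlers here ... */
	if (rc != 0)
		genwqe_reset_interrupt_capability(cd);	/* undo on failure */

	return rc;
}
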
742 * @cd: card device
750 static int set_reg_idx(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg_idx() argument
764 static int set_reg(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg() argument
767 return set_reg_idx(cd, r, i, m, addr, 0, val); in set_reg()
770 int genwqe_read_ffdc_regs(struct genwqe_dev *cd, struct genwqe_reg *regs, in genwqe_read_ffdc_regs() argument
778 gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR); in genwqe_read_ffdc_regs()
779 set_reg(cd, regs, &idx, max_regs, IO_SLC_CFGREG_GFIR, gfir); in genwqe_read_ffdc_regs()
782 sluid = __genwqe_readq(cd, IO_SLU_UNITCFG); /* 0x00000000 */ in genwqe_read_ffdc_regs()
783 set_reg(cd, regs, &idx, max_regs, IO_SLU_UNITCFG, sluid); in genwqe_read_ffdc_regs()
786 appid = __genwqe_readq(cd, IO_APP_UNITCFG); /* 0x02000000 */ in genwqe_read_ffdc_regs()
787 set_reg(cd, regs, &idx, max_regs, IO_APP_UNITCFG, appid); in genwqe_read_ffdc_regs()
794 ufir = __genwqe_readq(cd, ufir_addr); in genwqe_read_ffdc_regs()
795 set_reg(cd, regs, &idx, max_regs, ufir_addr, ufir); in genwqe_read_ffdc_regs()
799 ufec = __genwqe_readq(cd, ufec_addr); in genwqe_read_ffdc_regs()
800 set_reg(cd, regs, &idx, max_regs, ufec_addr, ufec); in genwqe_read_ffdc_regs()
808 sfir = __genwqe_readq(cd, sfir_addr); in genwqe_read_ffdc_regs()
809 set_reg(cd, regs, &idx, max_regs, sfir_addr, sfir); in genwqe_read_ffdc_regs()
812 sfec = __genwqe_readq(cd, sfec_addr); in genwqe_read_ffdc_regs()
813 set_reg(cd, regs, &idx, max_regs, sfec_addr, sfec); in genwqe_read_ffdc_regs()
827 * @cd: genwqe device descriptor
830 int genwqe_ffdc_buff_size(struct genwqe_dev *cd, int uid) in genwqe_ffdc_buff_size() argument
837 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_size()
843 val = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_size()
866 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_size()
882 * @cd: genwqe device descriptor
887 int genwqe_ffdc_buff_read(struct genwqe_dev *cd, int uid, in genwqe_ffdc_buff_read() argument
896 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_read()
901 e = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_read()
912 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
913 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
919 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
920 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
935 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
953 __genwqe_writeq(cd, addr, diag_sel); in genwqe_ffdc_buff_read()
960 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
961 set_reg_idx(cd, regs, &idx, max_regs, addr, in genwqe_ffdc_buff_read()
971 * @cd: genwqe device descriptor
979 int genwqe_write_vreg(struct genwqe_dev *cd, u32 reg, u64 val, int func) in genwqe_write_vreg() argument
981 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_write_vreg()
982 __genwqe_writeq(cd, reg, val); in genwqe_write_vreg()
988 * @cd: genwqe device descriptor
995 u64 genwqe_read_vreg(struct genwqe_dev *cd, u32 reg, int func) in genwqe_read_vreg() argument
997 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_read_vreg()
998 return __genwqe_readq(cd, reg); in genwqe_read_vreg()
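
A hedged sketch of the virtual-register window pair above, setting one bit of a VF register from the PF; the register offset, the bit and example_set_vf_reg_bit() itself are illustrative:

static u64 example_set_vf_reg_bit(struct genwqe_dev *cd, u32 reg, int vf_func)
{
	u64 val = genwqe_read_vreg(cd, reg, vf_func);

	genwqe_write_vreg(cd, reg, val | 0x1ull, vf_func);
	return genwqe_read_vreg(cd, reg, vf_func);	/* read back through the same window */
}
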
1003 * @cd: genwqe device descriptor
1014 int genwqe_base_clock_frequency(struct genwqe_dev *cd) in genwqe_base_clock_frequency() argument
1019 speed = (u16)((cd->slu_unitcfg >> 28) & 0x0full); in genwqe_base_clock_frequency()
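
The fragment above extracts the speed-grade nibble (bits 31:28) of slu_unitcfg; a hedged caller sketch, where the MHz unit is an assumption based on the helper's name:

static void example_log_base_clock(struct genwqe_dev *cd)
{
	int freq = genwqe_base_clock_frequency(cd);	/* unit assumed to be MHz */

	if (freq > 0)
		dev_info(&cd->pci_dev->dev, "base clock frequency: %d MHz\n", freq);
}
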
1028 * @cd: genwqe device descriptor
1032 void genwqe_stop_traps(struct genwqe_dev *cd) in genwqe_stop_traps() argument
1034 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_SET, 0xcull); in genwqe_stop_traps()
1039 * @cd: genwqe device descriptor
1043 void genwqe_start_traps(struct genwqe_dev *cd) in genwqe_start_traps() argument
1045 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_CLR, 0xcull); in genwqe_start_traps()
1047 if (genwqe_need_err_masking(cd)) in genwqe_start_traps()
1048 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_start_traps()
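
A hedged sketch bracketing an FFDC dump with genwqe_stop_traps()/genwqe_start_traps(); the trailing parameters of genwqe_ffdc_buff_read() are assumptions, the rest follows the fragments above:

static int example_capture_ffdc(struct genwqe_dev *cd, int uid,
				struct genwqe_reg *regs, unsigned int max_regs)
{
	int entries;

	genwqe_stop_traps(cd);		/* freeze traces while reading them out */

	entries = genwqe_ffdc_buff_size(cd, uid);
	if (entries > 0)
		genwqe_ffdc_buff_read(cd, uid, regs, max_regs);	/* assumed trailing params */

	genwqe_start_traps(cd);		/* re-arm traps afterwards */
	return entries;
}
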