// SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only)
/* Copyright(c) 2014 - 2021 Intel Corporation */
#include <adf_accel_devices.h>
#include <adf_admin.h>
#include <adf_common_drv.h>
#include <adf_gen2_config.h>
#include <adf_gen2_dc.h>
#include <adf_gen2_hw_csr_data.h>
#include <adf_gen2_hw_data.h>
#include <adf_gen2_pfvf.h>
#include "adf_dh895xcc_hw_data.h"
#include "adf_heartbeat.h"
#include "icp_qat_hw.h"

#define ADF_DH895XCC_VF_MSK	0xFFFFFFFF

/* Worker thread to service arbiter mappings */
static const u32 thrd_to_arb_map[ADF_DH895XCC_MAX_ACCELENGINES] = {
	0x12222AAA, 0x11666666, 0x12222AAA, 0x11666666,
	0x12222AAA, 0x11222222, 0x12222AAA, 0x11222222,
	0x12222AAA, 0x11222222, 0x12222AAA, 0x11222222
};

static struct adf_hw_device_class dh895xcc_class = {
	.name = ADF_DH895XCC_DEVICE_NAME,
	.type = DEV_DH895XCC,
	.instances = 0
};

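/* Mask of enabled accelerators; a set fuse bit disables the unit */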
static u32 get_accel_mask(struct adf_hw_device_data *self)
{
	u32 fuses = self->fuses;

	return ~fuses >> ADF_DH895XCC_ACCELERATORS_REG_OFFSET &
			 ADF_DH895XCC_ACCELERATORS_MASK;
}

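/* Mask of enabled accel engines, derived from the same fuse register */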
static u32 get_ae_mask(struct adf_hw_device_data *self)
{
	u32 fuses = self->fuses;

	return ~fuses & ADF_DH895XCC_ACCELENGINES_MASK;
}

static u32 get_misc_bar_id(struct adf_hw_device_data *self)
{
	return ADF_DH895XCC_PMISC_BAR;
}

static u32 get_ts_clock(struct adf_hw_device_data *self)
{
	/*
	 * Timestamp update interval is 16 AE clock ticks for dh895xcc.
	 */
	return self->clock_frequency / 16;
}

static u32 get_etr_bar_id(struct adf_hw_device_data *self)
{
	return ADF_DH895XCC_ETR_BAR;
}

static u32 get_sram_bar_id(struct adf_hw_device_data *self)
{
	return ADF_DH895XCC_SRAM_BAR;
}

static u32 get_accel_cap(struct adf_accel_dev *accel_dev)
{
	struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev;
	u32 capabilities;
	u32 legfuses;

	capabilities = ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC |
		       ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC |
		       ICP_ACCEL_CAPABILITIES_AUTHENTICATION |
		       ICP_ACCEL_CAPABILITIES_CIPHER |
		       ICP_ACCEL_CAPABILITIES_COMPRESSION;

	/* Read accelerator capabilities mask */
	pci_read_config_dword(pdev, ADF_DEVICE_LEGFUSE_OFFSET, &legfuses);

	/* A set bit in legfuses means the feature is OFF in this SKU */
	if (legfuses & ICP_ACCEL_MASK_CIPHER_SLICE) {
		capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC;
		capabilities &= ~ICP_ACCEL_CAPABILITIES_CIPHER;
	}
	if (legfuses & ICP_ACCEL_MASK_PKE_SLICE)
		capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC;
	if (legfuses & ICP_ACCEL_MASK_AUTH_SLICE) {
		capabilities &= ~ICP_ACCEL_CAPABILITIES_AUTHENTICATION;
		capabilities &= ~ICP_ACCEL_CAPABILITIES_CIPHER;
	}
	if (legfuses & ICP_ACCEL_MASK_COMPRESS_SLICE)
		capabilities &= ~ICP_ACCEL_CAPABILITIES_COMPRESSION;

	return capabilities;
}

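/* Decode the device SKU from the fuse control register */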
static enum dev_sku_info get_sku(struct adf_hw_device_data *self)
{
	int sku = (self->fuses & ADF_DH895XCC_FUSECTL_SKU_MASK)
	    >> ADF_DH895XCC_FUSECTL_SKU_SHIFT;

	switch (sku) {
	case ADF_DH895XCC_FUSECTL_SKU_1:
		return DEV_SKU_1;
	case ADF_DH895XCC_FUSECTL_SKU_2:
		return DEV_SKU_2;
	case ADF_DH895XCC_FUSECTL_SKU_3:
		return DEV_SKU_3;
	case ADF_DH895XCC_FUSECTL_SKU_4:
		return DEV_SKU_4;
	default:
		return DEV_SKU_UNKNOWN;
	}
}

static const u32 *adf_get_arbiter_mapping(struct adf_accel_dev *accel_dev)
{
	return thrd_to_arb_map;
}

static void enable_vf2pf_interrupts(void __iomem *pmisc_addr, u32 vf_mask)
{
	/* Enable VF2PF Messaging Ints - VFs 0 through 15 per vf_mask[15:0] */
	if (vf_mask & 0xFFFF) {
		u32 val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRMSK3)
			  & ~ADF_DH895XCC_ERR_MSK_VF2PF_L(vf_mask);
		ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK3, val);
	}

	/* Enable VF2PF Messaging Ints - VFs 16 through 31 per vf_mask[31:16] */
	if (vf_mask >> 16) {
		u32 val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRMSK5)
			  & ~ADF_DH895XCC_ERR_MSK_VF2PF_U(vf_mask);
		ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK5, val);
	}
}

static void disable_all_vf2pf_interrupts(void __iomem *pmisc_addr)
{
	u32 val;

	/* Disable VF2PF interrupts for VFs 0 through 15 per vf_mask[15:0] */
	val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRMSK3)
	      | ADF_DH895XCC_ERR_MSK_VF2PF_L(ADF_DH895XCC_VF_MSK);
	ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK3, val);

	/* Disable VF2PF interrupts for VFs 16 through 31 per vf_mask[31:16] */
	val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRMSK5)
	      | ADF_DH895XCC_ERR_MSK_VF2PF_U(ADF_DH895XCC_VF_MSK);
	ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK5, val);
}

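/* Disable pending VF2PF interrupts and return the sources that raised them */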
static u32 disable_pending_vf2pf_interrupts(void __iomem *pmisc_addr)
{
	u32 sources, pending, disabled;
	u32 errsou3, errmsk3;
	u32 errsou5, errmsk5;

	/* Get the interrupt sources triggered by VFs */
	errsou3 = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRSOU3);
	errsou5 = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRSOU5);
	sources = ADF_DH895XCC_ERR_REG_VF2PF_L(errsou3)
		  | ADF_DH895XCC_ERR_REG_VF2PF_U(errsou5);

	if (!sources)
		return 0;

	/* Get the already disabled interrupts */
	errmsk3 = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRMSK3);
	errmsk5 = ADF_CSR_RD(pmisc_addr, ADF_GEN2_ERRMSK5);
	disabled = ADF_DH895XCC_ERR_REG_VF2PF_L(errmsk3)
		   | ADF_DH895XCC_ERR_REG_VF2PF_U(errmsk5);

	pending = sources & ~disabled;
	if (!pending)
		return 0;

	/* Due to HW limitations, when disabling the interrupts, we can't
	 * just disable the requested sources, as this would lead to missed
	 * interrupts if the sources change just before writing to ERRMSK3
	 * and ERRMSK5.
	 * To work around it, disable all and re-enable only the sources that
	 * are neither currently pending nor already disabled. Re-enabling
	 * will trigger a new interrupt for the sources that have changed in
	 * the meantime, if any.
	 */
	errmsk3 |= ADF_DH895XCC_ERR_MSK_VF2PF_L(ADF_DH895XCC_VF_MSK);
	errmsk5 |= ADF_DH895XCC_ERR_MSK_VF2PF_U(ADF_DH895XCC_VF_MSK);
	ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK3, errmsk3);
	ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK5, errmsk5);

	/* Update only the section of errmsk3 and errmsk5 related to VF2PF */
	errmsk3 &= ~ADF_DH895XCC_ERR_MSK_VF2PF_L(ADF_DH895XCC_VF_MSK);
	errmsk5 &= ~ADF_DH895XCC_ERR_MSK_VF2PF_U(ADF_DH895XCC_VF_MSK);

	errmsk3 |= ADF_DH895XCC_ERR_MSK_VF2PF_L(sources | disabled);
	errmsk5 |= ADF_DH895XCC_ERR_MSK_VF2PF_U(sources | disabled);
	ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK3, errmsk3);
	ADF_CSR_WR(pmisc_addr, ADF_GEN2_ERRMSK5, errmsk5);

	/* Return the sources of the (new) interrupt(s) */
	return pending;
}

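/* Configure the AE to function mapping registers used for SR-IOV */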
static void configure_iov_threads(struct adf_accel_dev *accel_dev, bool enable)
{
	adf_gen2_cfg_iov_thds(accel_dev, enable,
			      ADF_DH895XCC_AE2FUNC_MAP_GRP_A_NUM_REGS,
			      ADF_DH895XCC_AE2FUNC_MAP_GRP_B_NUM_REGS);
}

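/* Populate the DH895xCC specific hw_data callbacks and device parameters */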
void adf_init_hw_data_dh895xcc(struct adf_hw_device_data *hw_data)
{
	hw_data->dev_class = &dh895xcc_class;
	hw_data->instance_id = dh895xcc_class.instances++;
	hw_data->num_banks = ADF_DH895XCC_ETR_MAX_BANKS;
	hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;
	hw_data->num_accel = ADF_DH895XCC_MAX_ACCELERATORS;
	hw_data->num_logical_accel = 1;
	hw_data->num_engines = ADF_DH895XCC_MAX_ACCELENGINES;
	hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET;
	hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK;
	hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP;
	hw_data->alloc_irq = adf_isr_resource_alloc;
	hw_data->free_irq = adf_isr_resource_free;
	hw_data->enable_error_correction = adf_gen2_enable_error_correction;
	hw_data->get_accel_mask = get_accel_mask;
	hw_data->get_ae_mask = get_ae_mask;
	hw_data->get_accel_cap = get_accel_cap;
	hw_data->get_num_accels = adf_gen2_get_num_accels;
	hw_data->get_num_aes = adf_gen2_get_num_aes;
	hw_data->get_etr_bar_id = get_etr_bar_id;
	hw_data->get_misc_bar_id = get_misc_bar_id;
	hw_data->get_admin_info = adf_gen2_get_admin_info;
	hw_data->get_arb_info = adf_gen2_get_arb_info;
	hw_data->get_sram_bar_id = get_sram_bar_id;
	hw_data->get_sku = get_sku;
	hw_data->fw_name = ADF_DH895XCC_FW;
	hw_data->fw_mmp_name = ADF_DH895XCC_MMP;
	hw_data->init_admin_comms = adf_init_admin_comms;
	hw_data->exit_admin_comms = adf_exit_admin_comms;
	hw_data->configure_iov_threads = configure_iov_threads;
	hw_data->send_admin_init = adf_send_admin_init;
	hw_data->init_arb = adf_init_arb;
	hw_data->exit_arb = adf_exit_arb;
	hw_data->get_arb_mapping = adf_get_arbiter_mapping;
	hw_data->enable_ints = adf_gen2_enable_ints;
	hw_data->reset_device = adf_reset_sbr;
	hw_data->disable_iov = adf_disable_sriov;
	hw_data->dev_config = adf_gen2_dev_config;
	hw_data->clock_frequency = ADF_DH895X_AE_FREQ;
	hw_data->get_hb_clock = get_ts_clock;
	hw_data->num_hb_ctrs = ADF_NUM_HB_CNT_PER_AE;
	hw_data->check_hb_ctrs = adf_heartbeat_check_ctrs;

	adf_gen2_init_pf_pfvf_ops(&hw_data->pfvf_ops);
	hw_data->pfvf_ops.enable_vf2pf_interrupts = enable_vf2pf_interrupts;
	hw_data->pfvf_ops.disable_all_vf2pf_interrupts = disable_all_vf2pf_interrupts;
	hw_data->pfvf_ops.disable_pending_vf2pf_interrupts = disable_pending_vf2pf_interrupts;
	adf_gen2_init_hw_csr_ops(&hw_data->csr_ops);
	adf_gen2_init_dc_ops(&hw_data->dc_ops);
}

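/* Undo the per-class instance accounting done in adf_init_hw_data_dh895xcc() */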
void adf_clean_hw_data_dh895xcc(struct adf_hw_device_data *hw_data)
{
	hw_data->dev_class->instances--;
}