/linux-6.12.1/drivers/crypto/intel/qat/qat_c3xxx/

adf_c3xxx_hw_data.c
    116  void adf_init_hw_data_c3xxx(struct adf_hw_device_data *hw_data)  in adf_init_hw_data_c3xxx() argument
    118  hw_data->dev_class = &c3xxx_class;  in adf_init_hw_data_c3xxx()
    119  hw_data->instance_id = c3xxx_class.instances++;  in adf_init_hw_data_c3xxx()
    120  hw_data->num_banks = ADF_C3XXX_ETR_MAX_BANKS;  in adf_init_hw_data_c3xxx()
    121  hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;  in adf_init_hw_data_c3xxx()
    122  hw_data->num_accel = ADF_C3XXX_MAX_ACCELERATORS;  in adf_init_hw_data_c3xxx()
    123  hw_data->num_logical_accel = 1;  in adf_init_hw_data_c3xxx()
    124  hw_data->num_engines = ADF_C3XXX_MAX_ACCELENGINES;  in adf_init_hw_data_c3xxx()
    125  hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET;  in adf_init_hw_data_c3xxx()
    126  hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK;  in adf_init_hw_data_c3xxx()
    [all …]
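Every adf_init_hw_data_*() hit in this result set follows the same pattern: the probe code owns a zeroed adf_hw_device_data and the per-flavor init routine fills in that device's ring and engine geometry. The sketch below models the pattern in plain, self-contained C; the field set is trimmed to what the hits show and the numeric constants are hypothetical stand-ins for the ADF_C3XXX_*/ADF_GEN2_* macros, not the kernel's values.

#include <stdio.h>

/*
 * Cut-down stand-in for struct adf_hw_device_data: only the geometry
 * fields visible in the search hits above, not the kernel layout.
 */
struct hw_device_data {
	unsigned int instance_id;
	unsigned int num_banks;
	unsigned int num_rings_per_bank;
	unsigned int num_accel;
	unsigned int num_logical_accel;
	unsigned int num_engines;
	unsigned int tx_rx_gap;
	unsigned int tx_rings_mask;
};

/* Illustrative constants standing in for the ADF_* macros. */
#define ETR_MAX_BANKS		16
#define ETR_MAX_RINGS_PER_BANK	16
#define MAX_ACCELERATORS	3
#define MAX_ACCELENGINES	6
#define RX_RINGS_OFFSET		8
#define TX_RINGS_MASK		0xFFu

static unsigned int instances;

/* Same shape as adf_init_hw_data_c3xxx(): fill caller-owned storage. */
static void init_hw_data(struct hw_device_data *hw_data)
{
	hw_data->instance_id = instances++;
	hw_data->num_banks = ETR_MAX_BANKS;
	hw_data->num_rings_per_bank = ETR_MAX_RINGS_PER_BANK;
	hw_data->num_accel = MAX_ACCELERATORS;
	hw_data->num_logical_accel = 1;
	hw_data->num_engines = MAX_ACCELENGINES;
	hw_data->tx_rx_gap = RX_RINGS_OFFSET;
	hw_data->tx_rings_mask = TX_RINGS_MASK;
}

int main(void)
{
	struct hw_device_data hw = { 0 };

	init_hw_data(&hw);
	printf("banks=%u rings/bank=%u engines=%u\n",
	       hw.num_banks, hw.num_rings_per_bank, hw.num_engines);
	return 0;
}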
adf_drv.c
    78   struct adf_hw_device_data *hw_data;  in adf_probe() local
    118  hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL,  in adf_probe()
    120  if (!hw_data) {  in adf_probe()
    125  accel_dev->hw_device = hw_data;  in adf_probe()
    129  &hw_data->fuses);  in adf_probe()
    131  &hw_data->straps);  in adf_probe()
    134  hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    135  hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    136  accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
    138  if (!hw_data->accel_mask || !hw_data->ae_mask ||  in adf_probe()
    [all …]
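The adf_probe() hits show the PF probe flow: allocate hw_data (kzalloc_node() in the GEN2 PF drivers, devm_kzalloc() in GEN4), stash it in accel_dev->hw_device, fill in hw_data->fuses and hw_data->straps, derive accel_mask and ae_mask through the per-flavor callbacks, and reject the device if either mask ends up empty. Below is a minimal userspace model of that validation flow under assumed mask-derivation rules; the real fuse encodings differ.

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

/* Only the probe-time fields and callbacks used in the hits above. */
struct hw_device_data {
	unsigned int fuses;
	unsigned int straps;
	unsigned int accel_mask;
	unsigned int ae_mask;
	unsigned int (*get_accel_mask)(struct hw_device_data *hw);
	unsigned int (*get_ae_mask)(struct hw_device_data *hw);
};

/* Assumed rule for the sketch: a set fuse bit disables that unit. */
static unsigned int get_accel_mask(struct hw_device_data *hw)
{
	return 0x07u & ~hw->fuses;
}

static unsigned int get_ae_mask(struct hw_device_data *hw)
{
	return 0x3Fu & ~(hw->fuses >> 3);
}

static int probe(unsigned int fuses, unsigned int straps)
{
	struct hw_device_data *hw_data;

	hw_data = calloc(1, sizeof(*hw_data));	/* kzalloc_node() in the driver */
	if (!hw_data)
		return -ENOMEM;

	hw_data->fuses = fuses;
	hw_data->straps = straps;
	hw_data->get_accel_mask = get_accel_mask;
	hw_data->get_ae_mask = get_ae_mask;

	hw_data->accel_mask = hw_data->get_accel_mask(hw_data);
	hw_data->ae_mask = hw_data->get_ae_mask(hw_data);

	/* Everything fused off: no usable device, as in adf_probe(). */
	if (!hw_data->accel_mask || !hw_data->ae_mask) {
		free(hw_data);
		return -ENODEV;
	}

	printf("accel_mask=0x%x ae_mask=0x%x\n",
	       hw_data->accel_mask, hw_data->ae_mask);
	free(hw_data);
	return 0;
}

int main(void)
{
	return probe(0x0, 0x0) ? 1 : 0;	/* nothing fused off */
}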
/linux-6.12.1/drivers/crypto/intel/qat/qat_c62x/

adf_c62x_hw_data.c
    118  void adf_init_hw_data_c62x(struct adf_hw_device_data *hw_data)  in adf_init_hw_data_c62x() argument
    120  hw_data->dev_class = &c62x_class;  in adf_init_hw_data_c62x()
    121  hw_data->instance_id = c62x_class.instances++;  in adf_init_hw_data_c62x()
    122  hw_data->num_banks = ADF_C62X_ETR_MAX_BANKS;  in adf_init_hw_data_c62x()
    123  hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;  in adf_init_hw_data_c62x()
    124  hw_data->num_accel = ADF_C62X_MAX_ACCELERATORS;  in adf_init_hw_data_c62x()
    125  hw_data->num_logical_accel = 1;  in adf_init_hw_data_c62x()
    126  hw_data->num_engines = ADF_C62X_MAX_ACCELENGINES;  in adf_init_hw_data_c62x()
    127  hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET;  in adf_init_hw_data_c62x()
    128  hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK;  in adf_init_hw_data_c62x()
    [all …]
adf_drv.c
    78   struct adf_hw_device_data *hw_data;  in adf_probe() local
    118  hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL,  in adf_probe()
    120  if (!hw_data) {  in adf_probe()
    125  accel_dev->hw_device = hw_data;  in adf_probe()
    129  &hw_data->fuses);  in adf_probe()
    131  &hw_data->straps);  in adf_probe()
    134  hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    135  hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    136  accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
    138  if (!hw_data->accel_mask || !hw_data->ae_mask ||  in adf_probe()
    [all …]
/linux-6.12.1/drivers/crypto/intel/qat/qat_dh895xccvf/

adf_dh895xccvf_hw_data.c
    63   void adf_init_hw_data_dh895xcciov(struct adf_hw_device_data *hw_data)  in adf_init_hw_data_dh895xcciov() argument
    65   hw_data->dev_class = &dh895xcciov_class;  in adf_init_hw_data_dh895xcciov()
    66   hw_data->num_banks = ADF_DH895XCCIOV_ETR_MAX_BANKS;  in adf_init_hw_data_dh895xcciov()
    67   hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;  in adf_init_hw_data_dh895xcciov()
    68   hw_data->num_accel = ADF_DH895XCCIOV_MAX_ACCELERATORS;  in adf_init_hw_data_dh895xcciov()
    69   hw_data->num_logical_accel = 1;  in adf_init_hw_data_dh895xcciov()
    70   hw_data->num_engines = ADF_DH895XCCIOV_MAX_ACCELENGINES;  in adf_init_hw_data_dh895xcciov()
    71   hw_data->tx_rx_gap = ADF_DH895XCCIOV_RX_RINGS_OFFSET;  in adf_init_hw_data_dh895xcciov()
    72   hw_data->tx_rings_mask = ADF_DH895XCCIOV_TX_RINGS_MASK;  in adf_init_hw_data_dh895xcciov()
    73   hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP;  in adf_init_hw_data_dh895xcciov()
    [all …]
adf_drv.c
    79   struct adf_hw_device_data *hw_data;  in adf_probe() local
    112  hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL,  in adf_probe()
    114  if (!hw_data) {  in adf_probe()
    118  accel_dev->hw_device = hw_data;  in adf_probe()
    122  hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    123  hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    124  accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
/linux-6.12.1/drivers/crypto/intel/qat/qat_c62xvf/

adf_c62xvf_hw_data.c
    63   void adf_init_hw_data_c62xiov(struct adf_hw_device_data *hw_data)  in adf_init_hw_data_c62xiov() argument
    65   hw_data->dev_class = &c62xiov_class;  in adf_init_hw_data_c62xiov()
    66   hw_data->num_banks = ADF_C62XIOV_ETR_MAX_BANKS;  in adf_init_hw_data_c62xiov()
    67   hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;  in adf_init_hw_data_c62xiov()
    68   hw_data->num_accel = ADF_C62XIOV_MAX_ACCELERATORS;  in adf_init_hw_data_c62xiov()
    69   hw_data->num_logical_accel = 1;  in adf_init_hw_data_c62xiov()
    70   hw_data->num_engines = ADF_C62XIOV_MAX_ACCELENGINES;  in adf_init_hw_data_c62xiov()
    71   hw_data->tx_rx_gap = ADF_C62XIOV_RX_RINGS_OFFSET;  in adf_init_hw_data_c62xiov()
    72   hw_data->tx_rings_mask = ADF_C62XIOV_TX_RINGS_MASK;  in adf_init_hw_data_c62xiov()
    73   hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP;  in adf_init_hw_data_c62xiov()
    [all …]
adf_drv.c
    79   struct adf_hw_device_data *hw_data;  in adf_probe() local
    112  hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL,  in adf_probe()
    114  if (!hw_data) {  in adf_probe()
    118  accel_dev->hw_device = hw_data;  in adf_probe()
    122  hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    123  hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    124  accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
/linux-6.12.1/drivers/crypto/intel/qat/qat_c3xxxvf/

adf_c3xxxvf_hw_data.c
    63   void adf_init_hw_data_c3xxxiov(struct adf_hw_device_data *hw_data)  in adf_init_hw_data_c3xxxiov() argument
    65   hw_data->dev_class = &c3xxxiov_class;  in adf_init_hw_data_c3xxxiov()
    66   hw_data->num_banks = ADF_C3XXXIOV_ETR_MAX_BANKS;  in adf_init_hw_data_c3xxxiov()
    67   hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;  in adf_init_hw_data_c3xxxiov()
    68   hw_data->num_accel = ADF_C3XXXIOV_MAX_ACCELERATORS;  in adf_init_hw_data_c3xxxiov()
    69   hw_data->num_logical_accel = 1;  in adf_init_hw_data_c3xxxiov()
    70   hw_data->num_engines = ADF_C3XXXIOV_MAX_ACCELENGINES;  in adf_init_hw_data_c3xxxiov()
    71   hw_data->tx_rx_gap = ADF_C3XXXIOV_RX_RINGS_OFFSET;  in adf_init_hw_data_c3xxxiov()
    72   hw_data->tx_rings_mask = ADF_C3XXXIOV_TX_RINGS_MASK;  in adf_init_hw_data_c3xxxiov()
    73   hw_data->ring_to_svc_map = ADF_GEN2_DEFAULT_RING_TO_SRV_MAP;  in adf_init_hw_data_c3xxxiov()
    [all …]
adf_drv.c
    79   struct adf_hw_device_data *hw_data;  in adf_probe() local
    112  hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL,  in adf_probe()
    114  if (!hw_data) {  in adf_probe()
    118  accel_dev->hw_device = hw_data;  in adf_probe()
    122  hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    123  hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    124  accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
/linux-6.12.1/drivers/crypto/intel/qat/qat_4xxx/

adf_4xxx_hw_data.c
    392  void adf_init_hw_data_4xxx(struct adf_hw_device_data *hw_data, u32 dev_id)  in adf_init_hw_data_4xxx() argument
    394  hw_data->dev_class = &adf_4xxx_class;  in adf_init_hw_data_4xxx()
    395  hw_data->instance_id = adf_4xxx_class.instances++;  in adf_init_hw_data_4xxx()
    396  hw_data->num_banks = ADF_GEN4_ETR_MAX_BANKS;  in adf_init_hw_data_4xxx()
    397  hw_data->num_banks_per_vf = ADF_GEN4_NUM_BANKS_PER_VF;  in adf_init_hw_data_4xxx()
    398  hw_data->num_rings_per_bank = ADF_GEN4_NUM_RINGS_PER_BANK;  in adf_init_hw_data_4xxx()
    399  hw_data->num_accel = ADF_GEN4_MAX_ACCELERATORS;  in adf_init_hw_data_4xxx()
    400  hw_data->num_engines = ADF_4XXX_MAX_ACCELENGINES;  in adf_init_hw_data_4xxx()
    401  hw_data->num_logical_accel = 1;  in adf_init_hw_data_4xxx()
    402  hw_data->tx_rx_gap = ADF_GEN4_RX_RINGS_OFFSET;  in adf_init_hw_data_4xxx()
    [all …]
adf_drv.c
    39   struct adf_hw_device_data *hw_data;  in adf_probe() local
    74   hw_data = devm_kzalloc(&pdev->dev, sizeof(*hw_data), GFP_KERNEL);  in adf_probe()
    75   if (!hw_data) {  in adf_probe()
    80   accel_dev->hw_device = hw_data;  in adf_probe()
    84   pci_read_config_dword(pdev, ADF_GEN4_FUSECTL4_OFFSET, &hw_data->fuses);  in adf_probe()
    87   hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    88   hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    89   accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
    91   if (!hw_data->accel_mask || !hw_data->ae_mask ||  in adf_probe()
    92   (~hw_data->ae_mask & 0x01)) {  in adf_probe()
    [all …]
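On top of the empty-mask check, this probe (like the dh895xcc and 420xx probes further down) also fails when bit 0 of ae_mask is clear: `~hw_data->ae_mask & 0x01` is non-zero exactly when acceleration engine 0 is fused off. A tiny, self-contained check function showing just that bit test:

#include <stdio.h>

/*
 * Mirrors the (~ae_mask & 0x01) test from the adf_probe() hits:
 * returns 1 when engine 0 is absent from the mask.
 */
static int ae0_missing(unsigned int ae_mask)
{
	return (~ae_mask & 0x01) != 0;
}

int main(void)
{
	printf("ae_mask=0x1ff -> ae0 missing: %d\n", ae0_missing(0x1ff)); /* 0 */
	printf("ae_mask=0x1fe -> ae0 missing: %d\n", ae0_missing(0x1fe)); /* 1 */
	return 0;
}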
/linux-6.12.1/drivers/crypto/intel/qat/qat_dh895xcc/

adf_dh895xcc_hw_data.c
    216  void adf_init_hw_data_dh895xcc(struct adf_hw_device_data *hw_data)  in adf_init_hw_data_dh895xcc() argument
    218  hw_data->dev_class = &dh895xcc_class;  in adf_init_hw_data_dh895xcc()
    219  hw_data->instance_id = dh895xcc_class.instances++;  in adf_init_hw_data_dh895xcc()
    220  hw_data->num_banks = ADF_DH895XCC_ETR_MAX_BANKS;  in adf_init_hw_data_dh895xcc()
    221  hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;  in adf_init_hw_data_dh895xcc()
    222  hw_data->num_accel = ADF_DH895XCC_MAX_ACCELERATORS;  in adf_init_hw_data_dh895xcc()
    223  hw_data->num_logical_accel = 1;  in adf_init_hw_data_dh895xcc()
    224  hw_data->num_engines = ADF_DH895XCC_MAX_ACCELENGINES;  in adf_init_hw_data_dh895xcc()
    225  hw_data->tx_rx_gap = ADF_GEN2_RX_RINGS_OFFSET;  in adf_init_hw_data_dh895xcc()
    226  hw_data->tx_rings_mask = ADF_GEN2_TX_RINGS_MASK;  in adf_init_hw_data_dh895xcc()
    [all …]
adf_drv.c
    78   struct adf_hw_device_data *hw_data;  in adf_probe() local
    118  hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL,  in adf_probe()
    120  if (!hw_data) {  in adf_probe()
    125  accel_dev->hw_device = hw_data;  in adf_probe()
    129  &hw_data->fuses);  in adf_probe()
    132  hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    133  hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    134  accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
    136  if (!hw_data->accel_mask || !hw_data->ae_mask ||  in adf_probe()
    137  ((~hw_data->ae_mask) & 0x01)) {  in adf_probe()
    [all …]
/linux-6.12.1/drivers/crypto/intel/qat/qat_420xx/

adf_420xx_hw_data.c
    158  struct adf_hw_device_data *hw_data = GET_HW_DATA(accel_dev);  in update_ae_mask() local
    164  ae_mask = get_ae_mask(hw_data);  in update_ae_mask()
    174  hw_data->ae_mask = ae_mask & config_ae_mask;  in update_ae_mask()
    426  void adf_init_hw_data_420xx(struct adf_hw_device_data *hw_data, u32 dev_id)  in adf_init_hw_data_420xx() argument
    428  hw_data->dev_class = &adf_420xx_class;  in adf_init_hw_data_420xx()
    429  hw_data->instance_id = adf_420xx_class.instances++;  in adf_init_hw_data_420xx()
    430  hw_data->num_banks = ADF_GEN4_ETR_MAX_BANKS;  in adf_init_hw_data_420xx()
    431  hw_data->num_banks_per_vf = ADF_GEN4_NUM_BANKS_PER_VF;  in adf_init_hw_data_420xx()
    432  hw_data->num_rings_per_bank = ADF_GEN4_NUM_RINGS_PER_BANK;  in adf_init_hw_data_420xx()
    433  hw_data->num_accel = ADF_GEN4_MAX_ACCELERATORS;  in adf_init_hw_data_420xx()
    [all …]
adf_drv.c
    37   struct adf_hw_device_data *hw_data;  in adf_probe() local
    72   hw_data = devm_kzalloc(&pdev->dev, sizeof(*hw_data), GFP_KERNEL);  in adf_probe()
    73   if (!hw_data) {  in adf_probe()
    78   accel_dev->hw_device = hw_data;  in adf_probe()
    82   pci_read_config_dword(pdev, ADF_GEN4_FUSECTL4_OFFSET, &hw_data->fuses);  in adf_probe()
    85   hw_data->accel_mask = hw_data->get_accel_mask(hw_data);  in adf_probe()
    86   hw_data->ae_mask = hw_data->get_ae_mask(hw_data);  in adf_probe()
    87   accel_pci_dev->sku = hw_data->get_sku(hw_data);  in adf_probe()
    89   if (!hw_data->accel_mask || !hw_data->ae_mask ||  in adf_probe()
    90   (~hw_data->ae_mask & 0x01)) {  in adf_probe()
    [all …]
/linux-6.12.1/drivers/video/backlight/

apple_bl.c
    31   struct hw_data {  struct
    40   static const struct hw_data *hw_data;  argument
    81   static const struct hw_data intel_chipset_data = {
    126  static const struct hw_data nvidia_chipset_data = {
    151  hw_data = &intel_chipset_data;  in apple_bl_add()
    153  hw_data = &nvidia_chipset_data;  in apple_bl_add()
    157  if (!hw_data) {  in apple_bl_add()
    164  intensity = hw_data->backlight_ops.get_brightness(NULL);  in apple_bl_add()
    167  hw_data->set_brightness(1);  in apple_bl_add()
    168  if (!hw_data->backlight_ops.get_brightness(NULL))  in apple_bl_add()
    [all …]
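Here hw_data is unrelated to QAT: apple_bl.c keeps a file-scope pointer that apple_bl_add() points at one of two const per-chipset tables (Intel or Nvidia) and then dereferences for the backlight operations, bailing out if neither matched. A stripped-down sketch of that select-a-const-table pattern; the fields, port values and chipset test below are illustrative, not the real driver's.

#include <errno.h>
#include <stdio.h>

/* Per-chipset descriptor: a register address plus a set_brightness op. */
struct hw_data {
	unsigned int iostart;		/* illustrative value */
	void (*set_brightness)(int level);
};

static void intel_set_brightness(int level)
{
	printf("intel path: brightness -> %d\n", level);
}

static void nvidia_set_brightness(int level)
{
	printf("nvidia path: brightness -> %d\n", level);
}

static const struct hw_data intel_chipset_data = {
	.iostart = 0xb2, .set_brightness = intel_set_brightness,
};

static const struct hw_data nvidia_chipset_data = {
	.iostart = 0x52e, .set_brightness = nvidia_set_brightness,
};

/* Selected once at add() time; stays NULL if the chipset is unknown. */
static const struct hw_data *hw_data;

enum chipset { CHIPSET_INTEL, CHIPSET_NVIDIA, CHIPSET_OTHER };

static int backlight_add(enum chipset chip)
{
	if (chip == CHIPSET_INTEL)
		hw_data = &intel_chipset_data;
	else if (chip == CHIPSET_NVIDIA)
		hw_data = &nvidia_chipset_data;

	if (!hw_data)
		return -ENODEV;	/* same bail-out as the real driver */

	hw_data->set_brightness(1);
	return 0;
}

int main(void)
{
	return backlight_add(CHIPSET_INTEL) ? 1 : 0;
}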
/linux-6.12.1/drivers/crypto/intel/qat/qat_common/

adf_init.c
    67   struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_dev_init() local
    70   if (!hw_data) {  in adf_dev_init()
    87   if (hw_data->init_device && hw_data->init_device(accel_dev)) {  in adf_dev_init()
    92   if (hw_data->init_admin_comms && hw_data->init_admin_comms(accel_dev)) {  in adf_dev_init()
    97   if (hw_data->init_arb && hw_data->init_arb(accel_dev)) {  in adf_dev_init()
    102  if (hw_data->get_ring_to_svc_map)  in adf_dev_init()
    103  hw_data->ring_to_svc_map = hw_data->get_ring_to_svc_map(accel_dev);  in adf_dev_init()
    119  if (hw_data->alloc_irq(accel_dev)) {  in adf_dev_init()
    125  if (hw_data->ras_ops.enable_ras_errors)  in adf_dev_init()
    126  hw_data->ras_ops.enable_ras_errors(accel_dev);  in adf_dev_init()
    [all …]
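adf_dev_init() treats most of these hw_data members as optional hooks: each stage runs only if the device flavor installed the callback, and a non-zero return aborts bring-up. A small model of that nullable-hook sequence, with hook names shortened and error handling reduced to an early return:

#include <stdio.h>

/* Nullable per-device hooks, as in struct adf_hw_device_data. */
struct hw_ops {
	int (*init_device)(void);
	int (*init_admin_comms)(void);
	int (*init_arb)(void);
};

static int init_arb(void)
{
	printf("arbiter configured\n");
	return 0;
}

/* Each stage is skipped when the hook is NULL; failure stops init. */
static int dev_init(const struct hw_ops *ops)
{
	if (ops->init_device && ops->init_device())
		return -1;
	if (ops->init_admin_comms && ops->init_admin_comms())
		return -1;
	if (ops->init_arb && ops->init_arb())
		return -1;
	return 0;
}

int main(void)
{
	/* Only the arbiter hook is provided; the other stages are skipped. */
	static const struct hw_ops ops = { .init_arb = init_arb };

	return dev_init(&ops) ? 1 : 0;
}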
adf_hw_arbiter.c
    20   struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_init_arb() local
    22   unsigned long ae_mask = hw_data->ae_mask;  in adf_init_arb()
    28   hw_data->get_arb_info(&info);  in adf_init_arb()
    39   thd_2_arb_cfg = hw_data->get_arb_mapping(accel_dev);  in adf_init_arb()
    41   for_each_set_bit(i, &ae_mask, hw_data->num_engines)  in adf_init_arb()
    51   struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_update_ring_arb() local
    53   u32 tx_ring_mask = hw_data->tx_rings_mask;  in adf_update_ring_arb()
    54   u32 shift = hw_data->tx_rx_gap;  in adf_update_ring_arb()
    75   struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_exit_arb() local
    82   hw_data->get_arb_info(&info);  in adf_exit_arb()
    [all …]
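adf_update_ring_arb() works from two hw_data fields seen above: tx_rings_mask marks which rings in a bank are TX, and tx_rx_gap is how far above each TX ring its RX partner sits. The sketch below is a hedged model of that pairing arithmetic (not the kernel function itself); it assumes arbitration should stay enabled only where both halves of a ring pair are enabled.

#include <stdint.h>
#include <stdio.h>

/*
 * ring_en:       per-bank ring-enable bitmap
 * tx_rings_mask: which ring numbers are TX (the low half on GEN2)
 * tx_rx_gap:     offset from a TX ring to its RX partner
 * Result: bit i set when both TX ring i and RX ring i+gap are enabled.
 */
static uint32_t arb_enable_mask(uint32_t ring_en, uint32_t tx_rings_mask,
				unsigned int tx_rx_gap)
{
	uint32_t arben_tx = ring_en & tx_rings_mask;
	uint32_t arben_rx = (ring_en >> tx_rx_gap) & tx_rings_mask;

	return arben_tx & arben_rx;
}

int main(void)
{
	/* 16 rings per bank: rings 0-7 TX, 8-15 RX, so gap = 8, mask = 0xFF. */
	uint32_t ring_en = (1u << 0) | (1u << 1) | (1u << 8); /* pair 0 complete, pair 1 not */

	printf("arb mask = 0x%x\n", arb_enable_mask(ring_en, 0xFFu, 8)); /* prints 0x1 */
	return 0;
}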
adf_isr.c
    24   struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_enable_msix() local
    25   u32 msix_num_entries = hw_data->num_banks + 1;  in adf_enable_msix()
    28   if (hw_data->set_msix_rttable)  in adf_enable_msix()
    29   hw_data->set_msix_rttable(accel_dev);  in adf_enable_msix()
    126  struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_handle_pm_int() local
    128  if (hw_data->handle_pm_interrupt &&  in adf_handle_pm_int()
    129  hw_data->handle_pm_interrupt(accel_dev))  in adf_handle_pm_int()
    180  struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_free_irqs() local
    183  int clust_irq = hw_data->num_banks;  in adf_free_irqs()
    187  for (i = 0; i < hw_data->num_banks; i++) {  in adf_free_irqs()
    [all …]
adf_gen4_hw_data.c
    227  struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_gen4_ring_pair_reset() local
    231  if (bank_number >= hw_data->num_banks)  in adf_gen4_ring_pair_reset()
    282  struct adf_hw_device_data *hw_data = GET_HW_DATA(accel_dev);  in adf_gen4_init_thd2arb_map() local
    283  u32 *thd2arb_map = hw_data->thd_to_arb_map;  in adf_gen4_init_thd2arb_map()
    290  if (!hw_data->get_rp_group || !hw_data->get_ena_thd_mask ||  in adf_gen4_init_thd2arb_map()
    291  !hw_data->get_num_aes || !hw_data->uof_get_num_objs ||  in adf_gen4_init_thd2arb_map()
    292  !hw_data->uof_get_ae_mask)  in adf_gen4_init_thd2arb_map()
    299  ae_cnt = hw_data->get_num_aes(hw_data);  in adf_gen4_init_thd2arb_map()
    300  worker_obj_cnt = hw_data->uof_get_num_objs(accel_dev) -  in adf_gen4_init_thd2arb_map()
    313  ae_mask = hw_data->uof_get_ae_mask(accel_dev, i);  in adf_gen4_init_thd2arb_map()
    [all …]
adf_gen2_hw_data.c
    28   struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_gen2_enable_error_correction() local
    30   unsigned long accel_mask = hw_data->accel_mask;  in adf_gen2_enable_error_correction()
    31   unsigned long ae_mask = hw_data->ae_mask;  in adf_gen2_enable_error_correction()
    35   for_each_set_bit(i, &ae_mask, hw_data->num_engines) {  in adf_gen2_enable_error_correction()
    45   for_each_set_bit(i, &accel_mask, hw_data->num_accel) {  in adf_gen2_enable_error_correction()
    116  struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_gen2_get_accel_cap() local
    118  u32 straps = hw_data->straps;  in adf_gen2_get_accel_cap()
    119  u32 fuses = hw_data->fuses;  in adf_gen2_get_accel_cap()
    156  struct adf_hw_device_data *hw_data = accel_dev->hw_device;  in adf_gen2_set_ssm_wdtimer() local
    160  unsigned long accel_mask = hw_data->accel_mask;  in adf_gen2_set_ssm_wdtimer()
    [all …]
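Both adf_hw_arbiter.c and adf_gen2_hw_data.c walk hw_data->ae_mask / accel_mask with for_each_set_bit(), touching only the engines and accelerators that survived fusing. The loop below is a plain-C equivalent of that iteration; the kernel macro operates on unsigned long bitmaps, but a shift-and-test loop is enough to model it here.

#include <stdio.h>

int main(void)
{
	unsigned long ae_mask = 0x2D;	/* engines 0, 2, 3 and 5 present */
	unsigned int num_engines = 6;
	unsigned int i;

	/* Equivalent of for_each_set_bit(i, &ae_mask, num_engines). */
	for (i = 0; i < num_engines; i++) {
		if (!(ae_mask >> i & 1))
			continue;
		printf("enable error correction on AE %u\n", i);
	}
	return 0;
}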
/linux-6.12.1/drivers/clk/

clk-lan966x.c
    189  struct clk_hw_onecell_data *hw_data,  in lan966x_gate_clk_register() argument
    197  hw_data->hws[i] =  in lan966x_gate_clk_register()
    203  if (IS_ERR(hw_data->hws[i]))  in lan966x_gate_clk_register()
    204  return dev_err_probe(dev, PTR_ERR(hw_data->hws[i]),  in lan966x_gate_clk_register()
    214  struct clk_hw_onecell_data *hw_data;  in lan966x_clk_probe() local
    220  hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, N_CLOCKS),  in lan966x_clk_probe()
    222  if (!hw_data)  in lan966x_clk_probe()
    231  hw_data->num = GCK_GATE_UHPHS;  in lan966x_clk_probe()
    235  hw_data->hws[i] = lan966x_gck_clk_register(dev, i);  in lan966x_clk_probe()
    236  if (IS_ERR(hw_data->hws[i])) {  in lan966x_clk_probe()
    [all …]
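The clk drivers use hw_data for something different again: a clk_hw_onecell_data, i.e. a header plus a flexible array of clk_hw pointers, allocated in one shot with devm_kzalloc() sized by struct_size(hw_data, hws, N_CLOCKS) and then filled clock by clock. A userspace sketch of that counted-flexible-array allocation, with plain calloc in place of devm_kzalloc and simple arithmetic in place of struct_size()'s overflow-checked helper:

#include <stdio.h>
#include <stdlib.h>

/* Simplified shape of clk_hw_onecell_data: count + flexible array. */
struct onecell_data {
	unsigned int num;
	void *hws[];	/* clk_hw pointers in the real struct */
};

int main(void)
{
	const unsigned int n_clocks = 8;	/* stand-in for N_CLOCKS */
	struct onecell_data *hw_data;
	unsigned int i;

	/* One allocation covers the header and all n_clocks slots. */
	hw_data = calloc(1, sizeof(*hw_data) + n_clocks * sizeof(hw_data->hws[0]));
	if (!hw_data)
		return 1;

	hw_data->num = n_clocks;
	for (i = 0; i < n_clocks; i++)
		hw_data->hws[i] = NULL;	/* clock registration would fill these */

	printf("onecell provider ready for %u clocks\n", hw_data->num);
	free(hw_data);
	return 0;
}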
/linux-6.12.1/net/core/

drop_monitor.c
    304  net_dm_hw_reset_per_cpu_data(struct per_cpu_dm_data *hw_data)  in net_dm_hw_reset_per_cpu_data() argument
    316  mod_timer(&hw_data->send_timer, jiffies + HZ / 10);  in net_dm_hw_reset_per_cpu_data()
    319  raw_spin_lock_irqsave(&hw_data->lock, flags);  in net_dm_hw_reset_per_cpu_data()
    320  swap(hw_data->hw_entries, hw_entries);  in net_dm_hw_reset_per_cpu_data()
    321  raw_spin_unlock_irqrestore(&hw_data->lock, flags);  in net_dm_hw_reset_per_cpu_data()
    412  struct per_cpu_dm_data *hw_data;  in net_dm_hw_summary_work() local
    416  hw_data = container_of(work, struct per_cpu_dm_data, dm_alert_work);  in net_dm_hw_summary_work()
    418  hw_entries = net_dm_hw_reset_per_cpu_data(hw_data);  in net_dm_hw_summary_work()
    445  struct per_cpu_dm_data *hw_data;  in net_dm_hw_trap_summary_probe() local
    452  hw_data = this_cpu_ptr(&dm_hw_cpu_data);  in net_dm_hw_trap_summary_probe()
    [all …]
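drop_monitor.c's hw_data is a per-CPU accumulator: the summary work detaches the queued entries under hw_data->lock with swap(), drops the lock, and then processes the detached batch outside it. A reduced model of that detach-under-lock handoff, with a pthread mutex standing in for the raw spinlock and a trivial entries struct:

#include <pthread.h>
#include <stdio.h>

struct entries {
	int count;		/* queued drop reports */
};

struct per_cpu_data {
	pthread_mutex_t lock;	/* raw_spin_lock_irqsave() in the kernel */
	struct entries *entries;
};

/* Swap in a fresh buffer and return the old one for lock-free processing. */
static struct entries *reset_per_cpu_data(struct per_cpu_data *data,
					  struct entries *fresh)
{
	struct entries *old;

	pthread_mutex_lock(&data->lock);
	old = data->entries;
	data->entries = fresh;
	pthread_mutex_unlock(&data->lock);

	return old;
}

int main(void)
{
	struct entries full = { .count = 3 }, empty = { .count = 0 };
	struct per_cpu_data hw_data = {
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.entries = &full,
	};
	struct entries *batch = reset_per_cpu_data(&hw_data, &empty);

	printf("summarizing %d queued drops\n", batch->count);
	return 0;
}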
/linux-6.12.1/drivers/clk/uniphier/

clk-uniphier-core.c
    44   struct clk_hw_onecell_data *hw_data;  in uniphier_clk_probe() local
    66   hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, clk_num),  in uniphier_clk_probe()
    68   if (!hw_data)  in uniphier_clk_probe()
    71   hw_data->num = clk_num;  in uniphier_clk_probe()
    75   hw_data->hws[clk_num] = ERR_PTR(-EINVAL);  in uniphier_clk_probe()
    86   hw_data->hws[p->idx] = hw;  in uniphier_clk_probe()
    90   hw_data);  in uniphier_clk_probe()