Lines Matching full:pvt
320 u64 (*get_tolm)(struct sbridge_pvt *pvt);
321 u64 (*get_tohm)(struct sbridge_pvt *pvt);
330 u8 (*get_node_id)(struct sbridge_pvt *pvt);
332 enum mem_type (*get_memory_type)(struct sbridge_pvt *pvt);
333 enum dev_type (*get_width)(struct sbridge_pvt *pvt, u32 mtr);
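The matches at 320-333 come from the driver's per-generation callback table: each supported CPU family (Sandy Bridge, Ivy Bridge, Haswell, Broadwell, Knights Landing) plugs its own register readers into these function pointers, and the rest of the driver only calls through them. A condensed sketch of that pattern is below; the enclosing struct name and anything beyond the members visible in the matches are assumptions for illustration.

/* Sketch of the per-generation ops table the matches above belong to.
 * Only the callbacks visible in the listing are reproduced; the
 * surrounding struct name and layout are assumptions. */
struct sbridge_platform_ops {
	u64 (*get_tolm)(struct sbridge_pvt *pvt);	/* top of low memory  */
	u64 (*get_tohm)(struct sbridge_pvt *pvt);	/* top of high memory */
	u8  (*get_node_id)(struct sbridge_pvt *pvt);
	enum mem_type (*get_memory_type)(struct sbridge_pvt *pvt);
	enum dev_type (*get_width)(struct sbridge_pvt *pvt, u32 mtr);
};
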
802 static u64 sbridge_get_tolm(struct sbridge_pvt *pvt) in sbridge_get_tolm() argument
807 pci_read_config_dword(pvt->pci_sad1, TOLM, &reg); in sbridge_get_tolm()
811 static u64 sbridge_get_tohm(struct sbridge_pvt *pvt) in sbridge_get_tohm() argument
815 pci_read_config_dword(pvt->pci_sad1, TOHM, &reg); in sbridge_get_tohm()
819 static u64 ibridge_get_tolm(struct sbridge_pvt *pvt) in ibridge_get_tolm() argument
823 pci_read_config_dword(pvt->pci_br1, TOLM, &reg); in ibridge_get_tolm()
828 static u64 ibridge_get_tohm(struct sbridge_pvt *pvt) in ibridge_get_tohm() argument
832 pci_read_config_dword(pvt->pci_br1, TOHM, &reg); in ibridge_get_tohm()
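Each of these readers follows the same shape: read one 32-bit config register from the PCI function that carries it (pci_sad1 on Sandy Bridge, pci_br1 on Ivy Bridge), then expand the field into a 64-bit byte address. A minimal sketch of that shape, assuming a GET_TOLM()-style bit-extraction macro like the driver's; the exact bit layout is not reproduced here.

static u64 sbridge_get_tolm(struct sbridge_pvt *pvt)
{
	u32 reg;

	/* Read the Top Of Low Memory register from the SAD1 function and
	 * expand its bitfield into a 64-bit limit address. */
	pci_read_config_dword(pvt->pci_sad1, TOLM, &reg);
	return GET_TOLM(reg);	/* driver macro; bit positions omitted here */
}
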
885 static enum mem_type get_memory_type(struct sbridge_pvt *pvt) in get_memory_type() argument
890 if (pvt->pci_ddrio) { in get_memory_type()
891 pci_read_config_dword(pvt->pci_ddrio, pvt->info.rankcfgr, in get_memory_type()
904 static enum mem_type haswell_get_memory_type(struct sbridge_pvt *pvt) in haswell_get_memory_type() argument
910 if (!pvt->pci_ddrio) in haswell_get_memory_type()
913 pci_read_config_dword(pvt->pci_ddrio, in haswell_get_memory_type()
919 pci_read_config_dword(pvt->pci_ta, MCMTR, &reg); in haswell_get_memory_type()
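Memory-type probing differs by generation: Sandy Bridge inspects the rank-configuration register behind the DDRIO function, while Haswell additionally reads MCMTR through the TA function to tell DDR4 from DDR3. A sketch of the Sandy Bridge variant is below; IS_RDIMM_ENABLED() stands in for the driver's registered-DIMM test and its exact bit position is treated as an assumption here.

static enum mem_type get_memory_type(struct sbridge_pvt *pvt)
{
	u32 reg;

	if (!pvt->pci_ddrio)
		return MEM_UNKNOWN;	/* no DDRIO device found during bind */

	pci_read_config_dword(pvt->pci_ddrio, pvt->info.rankcfgr, &reg);
	/* Registered vs. unbuffered DDR3, per the rank configuration bits. */
	return IS_RDIMM_ENABLED(reg) ? MEM_RDDR3 : MEM_DDR3;
}
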
936 static enum dev_type knl_get_width(struct sbridge_pvt *pvt, u32 mtr) in knl_get_width() argument
942 static enum dev_type sbridge_get_width(struct sbridge_pvt *pvt, u32 mtr) in sbridge_get_width() argument
967 static enum dev_type ibridge_get_width(struct sbridge_pvt *pvt, u32 mtr) in ibridge_get_width() argument
976 static enum dev_type broadwell_get_width(struct sbridge_pvt *pvt, u32 mtr) in broadwell_get_width() argument
982 static enum mem_type knl_get_memory_type(struct sbridge_pvt *pvt) in knl_get_memory_type() argument
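The get_width() callbacks map a small device-width field of the per-DIMM MTR register onto EDAC's dev_type enum; only the field's bit offset moves between generations, and KNL reports a fixed width. A sketch of the shared mapping, with the helper name and the per-generation bit offsets treated as assumptions:

/* Map the 2-bit DDR device-width field to EDAC's dev_type.  Callers
 * extract the field from MTR first; the offset differs per generation,
 * so treat any particular GET_BITFIELD() range as an assumption. */
static enum dev_type __sbridge_width(u32 width_field)
{
	switch (width_field) {
	case 0:	return DEV_X4;
	case 1:	return DEV_X8;
	case 2:	return DEV_X16;
	default: return DEV_UNKNOWN;
	}
}
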
988 static u8 get_node_id(struct sbridge_pvt *pvt) in get_node_id() argument
991 pci_read_config_dword(pvt->pci_br0, SAD_CONTROL, &reg); in get_node_id()
995 static u8 haswell_get_node_id(struct sbridge_pvt *pvt) in haswell_get_node_id() argument
999 pci_read_config_dword(pvt->pci_sad1, SAD_CONTROL, &reg); in haswell_get_node_id()
1003 static u8 knl_get_node_id(struct sbridge_pvt *pvt) in knl_get_node_id() argument
1007 pci_read_config_dword(pvt->pci_sad1, SAD_CONTROL, &reg); in knl_get_node_id()
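All three node-id readers fetch SAD_CONTROL and return its low-order node-id bits; only the PCI function (pci_br0 vs. pci_sad1) and the field width change between generations. A minimal sketch, with the exact field width treated as an assumption:

static u8 get_node_id(struct sbridge_pvt *pvt)
{
	u32 reg;

	/* The socket/node id sits in the low bits of SAD_CONTROL;
	 * Haswell and KNL read the same register via pci_sad1 instead. */
	pci_read_config_dword(pvt->pci_br0, SAD_CONTROL, &reg);
	return GET_BITFIELD(reg, 0, 2);	/* field width is generation-specific */
}
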
1045 static u64 haswell_get_tolm(struct sbridge_pvt *pvt) in haswell_get_tolm() argument
1049 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOLM, &reg); in haswell_get_tolm()
1053 static u64 haswell_get_tohm(struct sbridge_pvt *pvt) in haswell_get_tohm() argument
1058 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOHM_0, &reg); in haswell_get_tohm()
1060 pci_read_config_dword(pvt->info.pci_vtd, HASWELL_TOHM_1, &reg); in haswell_get_tohm()
1066 static u64 knl_get_tolm(struct sbridge_pvt *pvt) in knl_get_tolm() argument
1070 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOLM, &reg); in knl_get_tolm()
1074 static u64 knl_get_tohm(struct sbridge_pvt *pvt) in knl_get_tohm() argument
1079 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_0, &reg_lo); in knl_get_tohm()
1080 pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_1, &reg_hi); in knl_get_tohm()
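On Haswell and Knights Landing the top-of-high-memory limit no longer fits in a single register, so two 32-bit reads are stitched together. A simplified sketch of the KNL case; any fixed bias or masking the driver applies afterwards is deliberately omitted here.

static u64 knl_get_tohm(struct sbridge_pvt *pvt)
{
	u32 reg_lo, reg_hi;

	/* TOHM spans two 32-bit registers on KNL: concatenate them. */
	pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_0, &reg_lo);
	pci_read_config_dword(pvt->knl.pci_mc_info, KNL_TOHM_1, &reg_hi);
	return ((u64)reg_hi << 32) | reg_lo;	/* final adjustment omitted */
}
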
1143 * @pvt: driver private data
1154 static int knl_get_tad(const struct sbridge_pvt *pvt, in knl_get_tad() argument
1167 pci_mc = pvt->knl.pci_mc0; in knl_get_tad()
1170 pci_mc = pvt->knl.pci_mc1; in knl_get_tad()
1348 static int knl_get_dimm_capacity(struct sbridge_pvt *pvt, u64 *mc_sizes) in knl_get_dimm_capacity() argument
1375 pci_read_config_dword(pvt->knl.pci_cha[i], in knl_get_dimm_capacity()
1401 pci_read_config_dword(pvt->knl.pci_cha[i], in knl_get_dimm_capacity()
1424 for (sad_rule = 0; sad_rule < pvt->info.max_sad; sad_rule++) { in knl_get_dimm_capacity()
1428 pci_read_config_dword(pvt->pci_sad0, in knl_get_dimm_capacity()
1429 pvt->info.dram_rule[sad_rule], &dram_rule); in knl_get_dimm_capacity()
1436 sad_limit = pvt->info.sad_limit(dram_rule)+1; in knl_get_dimm_capacity()
1438 pci_read_config_dword(pvt->pci_sad0, in knl_get_dimm_capacity()
1439 pvt->info.interleave_list[sad_rule], &interleave_reg); in knl_get_dimm_capacity()
1445 first_pkg = sad_pkg(pvt->info.interleave_pkg, in knl_get_dimm_capacity()
1448 pkg = sad_pkg(pvt->info.interleave_pkg, in knl_get_dimm_capacity()
1492 if (knl_get_tad(pvt, in knl_get_dimm_capacity()
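knl_get_dimm_capacity() walks every SAD rule through pci_sad0, skips disabled rules, and uses the interleave list to attribute each address range to the right package/channel targets before consulting the TAD entries. A trimmed sketch of that walk, reduced to the steps visible in the matches; DRAM_RULE_ENABLE() is the driver's enable-bit test and its bit position is treated as an assumption.

/* Trimmed sketch of the SAD-rule walk in knl_get_dimm_capacity(). */
static void knl_walk_sad_rules(struct sbridge_pvt *pvt)
{
	u32 dram_rule, interleave_reg;
	u64 sad_limit;
	int sad_rule, first_pkg;

	for (sad_rule = 0; sad_rule < pvt->info.max_sad; sad_rule++) {
		pci_read_config_dword(pvt->pci_sad0,
				      pvt->info.dram_rule[sad_rule], &dram_rule);
		if (!DRAM_RULE_ENABLE(dram_rule))
			continue;	/* unused rule, nothing mapped here */

		sad_limit = pvt->info.sad_limit(dram_rule) + 1;

		pci_read_config_dword(pvt->pci_sad0,
				      pvt->info.interleave_list[sad_rule],
				      &interleave_reg);
		first_pkg = sad_pkg(pvt->info.interleave_pkg, interleave_reg, 0);

		edac_dbg(0, "SAD rule %d: limit 0x%llx, first target %d\n",
			 sad_rule, (unsigned long long)sad_limit, first_pkg);
		/* The real function then sizes the interleave and credits
		 * the rule's span to the participating channels. */
	}
}
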
1573 struct sbridge_pvt *pvt = mci->pvt_info; in get_source_id() local
1576 if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL || in get_source_id()
1577 pvt->info.type == KNIGHTS_LANDING) in get_source_id()
1578 pci_read_config_dword(pvt->pci_sad1, SAD_TARGET, &reg); in get_source_id()
1580 pci_read_config_dword(pvt->pci_br0, SAD_TARGET, &reg); in get_source_id()
1582 if (pvt->info.type == KNIGHTS_LANDING) in get_source_id()
1583 pvt->sbridge_dev->source_id = SOURCE_ID_KNL(reg); in get_source_id()
1585 pvt->sbridge_dev->source_id = SOURCE_ID(reg); in get_source_id()
1592 struct sbridge_pvt *pvt = mci->pvt_info; in __populate_dimms() local
1593 int channels = pvt->info.type == KNIGHTS_LANDING ? KNL_MAX_CHANNELS in __populate_dimms()
1600 mtype = pvt->info.get_memory_type(pvt); in __populate_dimms()
1618 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1620 if (!pvt->knl.pci_channel[i]) in __populate_dimms()
1624 if (!pvt->pci_tad[i]) in __populate_dimms()
1626 pci_read_config_dword(pvt->pci_tad[i], 0x8c, &amap); in __populate_dimms()
1631 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1632 pci_read_config_dword(pvt->knl.pci_channel[i], in __populate_dimms()
1635 pci_read_config_dword(pvt->pci_tad[i], in __populate_dimms()
1641 if (!IS_ECC_ENABLED(pvt->info.mcmtr)) { in __populate_dimms()
1643 pvt->sbridge_dev->source_id, in __populate_dimms()
1644 pvt->sbridge_dev->dom, i); in __populate_dimms()
1647 pvt->channel[i].dimms++; in __populate_dimms()
1649 ranks = numrank(pvt->info.type, mtr); in __populate_dimms()
1651 if (pvt->info.type == KNIGHTS_LANDING) { in __populate_dimms()
1665 pvt->sbridge_dev->mc, pvt->sbridge_dev->dom, i, j, in __populate_dimms()
1671 dimm->dtype = pvt->info.get_width(pvt, mtr); in __populate_dimms()
1674 pvt->channel[i].dimm[j].rowbits = order_base_2(rows); in __populate_dimms()
1675 pvt->channel[i].dimm[j].colbits = order_base_2(cols); in __populate_dimms()
1676 pvt->channel[i].dimm[j].bank_xor_enable = in __populate_dimms()
1677 GET_BITFIELD(pvt->info.mcmtr, 9, 9); in __populate_dimms()
1678 pvt->channel[i].dimm[j].amap_fine = GET_BITFIELD(amap, 0, 0); in __populate_dimms()
1681 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom, i, j); in __populate_dimms()
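__populate_dimms() loops over channels and DIMM slots, reads each slot's MTR register (through pci_tad[i], or the per-channel device on KNL), and fills the EDAC dimm_info from the decoded geometry. A sketch of the per-slot size computation, lightly simplified and assuming the driver's numrank()/numrow()/numcol() field decoders; the bank count is generation-specific (8 assumed here, 16 on KNL).

/* Per-slot body of __populate_dimms(), lightly simplified. */
u32 ranks = numrank(pvt->info.type, mtr);
u32 rows  = numrow(mtr);
u32 cols  = numcol(mtr);
u32 banks = 8;

/* rows*cols*banks*ranks counts 64-bit words, so shifting by (20 - 3)
 * multiplies by 8 bytes per word while converting bytes to MiB. */
u64 size   = ((u64)rows * cols * banks * ranks) >> (20 - 3);
u32 npages = MiB_TO_PAGES(size);

dimm->nr_pages = npages;
dimm->grain    = 32;
dimm->dtype    = pvt->info.get_width(pvt, mtr);
dimm->mtype    = mtype;
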
1691 struct sbridge_pvt *pvt = mci->pvt_info; in get_dimm_config() local
1696 pvt->sbridge_dev->node_id = pvt->info.get_node_id(pvt); in get_dimm_config()
1698 pvt->sbridge_dev->mc, in get_dimm_config()
1699 pvt->sbridge_dev->node_id, in get_dimm_config()
1700 pvt->sbridge_dev->source_id); in get_dimm_config()
1705 if (pvt->info.type == KNIGHTS_LANDING) { in get_dimm_config()
1707 pvt->mirror_mode = NON_MIRRORING; in get_dimm_config()
1708 pvt->is_cur_addr_mirrored = false; in get_dimm_config()
1710 if (knl_get_dimm_capacity(pvt, knl_mc_sizes) != 0) in get_dimm_config()
1712 if (pci_read_config_dword(pvt->pci_ta, KNL_MCMTR, &pvt->info.mcmtr)) { in get_dimm_config()
1717 if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL) { in get_dimm_config()
1718 if (pci_read_config_dword(pvt->pci_ha, HASWELL_HASYSDEFEATURE2, &reg)) { in get_dimm_config()
1722 pvt->is_chan_hash = GET_BITFIELD(reg, 21, 21); in get_dimm_config()
1724 pvt->mirror_mode = ADDR_RANGE_MIRRORING; in get_dimm_config()
1729 if (pci_read_config_dword(pvt->pci_ras, RASENABLES, &reg)) { in get_dimm_config()
1734 pvt->mirror_mode = FULL_MIRRORING; in get_dimm_config()
1737 pvt->mirror_mode = NON_MIRRORING; in get_dimm_config()
1742 if (pci_read_config_dword(pvt->pci_ta, MCMTR, &pvt->info.mcmtr)) { in get_dimm_config()
1746 if (IS_LOCKSTEP_ENABLED(pvt->info.mcmtr)) { in get_dimm_config()
1749 pvt->is_lockstep = true; in get_dimm_config()
1753 pvt->is_lockstep = false; in get_dimm_config()
1755 if (IS_CLOSE_PG(pvt->info.mcmtr)) { in get_dimm_config()
1757 pvt->is_close_pg = true; in get_dimm_config()
1760 pvt->is_close_pg = false; in get_dimm_config()
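get_dimm_config() caches the RAS configuration in pvt before any error is decoded: mirror mode from HASYSDEFEATURE2 (Haswell/Broadwell) or RASENABLES (earlier parts), plus lockstep and close-page mode from MCMTR. A condensed sketch of the pre-Haswell path, assuming bit tests like the driver's IS_MIRROR_ENABLED(), IS_LOCKSTEP_ENABLED() and IS_CLOSE_PG().

/* Condensed sketch of the flag caching in get_dimm_config(). */
if (pci_read_config_dword(pvt->pci_ras, RASENABLES, &reg))
	return -ENODEV;
pvt->mirror_mode = IS_MIRROR_ENABLED(reg) ? FULL_MIRRORING : NON_MIRRORING;

if (pci_read_config_dword(pvt->pci_ta, MCMTR, &pvt->info.mcmtr))
	return -ENODEV;
pvt->is_lockstep = IS_LOCKSTEP_ENABLED(pvt->info.mcmtr);
pvt->is_close_pg = IS_CLOSE_PG(pvt->info.mcmtr);
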
1769 struct sbridge_pvt *pvt = mci->pvt_info; in get_memory_layout() local
1781 pvt->tolm = pvt->info.get_tolm(pvt); in get_memory_layout()
1782 tmp_mb = (1 + pvt->tolm) >> 20; in get_memory_layout()
1786 gb, (mb*1000)/1024, (u64)pvt->tolm); in get_memory_layout()
1789 pvt->tohm = pvt->info.get_tohm(pvt); in get_memory_layout()
1790 tmp_mb = (1 + pvt->tohm) >> 20; in get_memory_layout()
1794 gb, (mb*1000)/1024, (u64)pvt->tohm); in get_memory_layout()
1803 for (n_sads = 0; n_sads < pvt->info.max_sad; n_sads++) { in get_memory_layout()
1805 pci_read_config_dword(pvt->pci_sad0, pvt->info.dram_rule[n_sads], in get_memory_layout()
1807 limit = pvt->info.sad_limit(reg); in get_memory_layout()
1819 show_dram_attr(pvt->info.dram_attr(reg)), in get_memory_layout()
1822 get_intlv_mode_str(reg, pvt->info.type), in get_memory_layout()
1826 pci_read_config_dword(pvt->pci_sad0, pvt->info.interleave_list[n_sads], in get_memory_layout()
1828 sad_interl = sad_pkg(pvt->info.interleave_pkg, reg, 0); in get_memory_layout()
1830 u32 pkg = sad_pkg(pvt->info.interleave_pkg, reg, j); in get_memory_layout()
1839 if (pvt->info.type == KNIGHTS_LANDING) in get_memory_layout()
1847 pci_read_config_dword(pvt->pci_ha, tad_dram_rule[n_tads], &reg); in get_memory_layout()
1871 if (!pvt->channel[i].dimms) in get_memory_layout()
1874 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1891 if (!pvt->channel[i].dimms) in get_memory_layout()
1894 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1901 tmp_mb = pvt->info.rir_limit(reg) >> 20; in get_memory_layout()
1912 pci_read_config_dword(pvt->pci_tad[i], in get_memory_layout()
1915 tmp_mb = RIR_OFFSET(pvt->info.type, reg) << 6; in get_memory_layout()
1922 (u32)RIR_RNK_TGT(pvt->info.type, reg), in get_memory_layout()
1984 struct sbridge_pvt *pvt; in sb_decode_ddr4() local
1989 pvt = mci->pvt_info; in sb_decode_ddr4()
1990 amap_fine = pvt->channel[ch].dimm[dimmno].amap_fine; in sb_decode_ddr4()
1992 rowbits = pvt->channel[ch].dimm[dimmno].rowbits; in sb_decode_ddr4()
1993 colbits = pvt->channel[ch].dimm[dimmno].colbits; in sb_decode_ddr4()
1994 bank_xor_enable = pvt->channel[ch].dimm[dimmno].bank_xor_enable; in sb_decode_ddr4()
1996 if (pvt->is_lockstep) { in sb_decode_ddr4()
2002 if (pvt->is_close_pg) { in sb_decode_ddr4()
2041 struct sbridge_pvt *pvt = mci->pvt_info; in get_memory_error_data() local
2063 if ((addr > (u64) pvt->tolm) && (addr < (1LL << 32))) { in get_memory_error_data()
2067 if (addr >= (u64)pvt->tohm) { in get_memory_error_data()
2075 for (n_sads = 0; n_sads < pvt->info.max_sad; n_sads++) { in get_memory_error_data()
2076 pci_read_config_dword(pvt->pci_sad0, pvt->info.dram_rule[n_sads], in get_memory_error_data()
2082 limit = pvt->info.sad_limit(reg); in get_memory_error_data()
2091 if (n_sads == pvt->info.max_sad) { in get_memory_error_data()
2096 *area_type = show_dram_attr(pvt->info.dram_attr(dram_rule)); in get_memory_error_data()
2097 interleave_mode = pvt->info.interleave_mode(dram_rule); in get_memory_error_data()
2099 pci_read_config_dword(pvt->pci_sad0, pvt->info.interleave_list[n_sads], in get_memory_error_data()
2102 if (pvt->info.type == SANDY_BRIDGE) { in get_memory_error_data()
2103 sad_interl = sad_pkg(pvt->info.interleave_pkg, reg, 0); in get_memory_error_data()
2105 u32 pkg = sad_pkg(pvt->info.interleave_pkg, reg, sad_way); in get_memory_error_data()
2113 pvt->sbridge_dev->mc, in get_memory_error_data()
2142 } else if (pvt->info.type == HASWELL || pvt->info.type == BROADWELL) { in get_memory_error_data()
2159 pkg = sad_pkg(pvt->info.interleave_pkg, reg, idx); in get_memory_error_data()
2165 pci_read_config_dword(pvt->pci_ha, HASWELL_HASYSDEFEATURE2, &reg); in get_memory_error_data()
2174 pkg = sad_pkg(pvt->info.interleave_pkg, reg, idx); in get_memory_error_data()
2194 pvt = mci->pvt_info; in get_memory_error_data()
2200 pci_ha = pvt->pci_ha; in get_memory_error_data()
2224 if (pvt->is_chan_hash) in get_memory_error_data()
2251 pci_read_config_dword(pvt->pci_tad[base_ch], tad_ch_nilv_offset[n_tads], &tad_offset); in get_memory_error_data()
2253 if (pvt->mirror_mode == FULL_MIRRORING || in get_memory_error_data()
2254 (pvt->mirror_mode == ADDR_RANGE_MIRRORING && n_tads == 0)) { in get_memory_error_data()
2266 pvt->is_cur_addr_mirrored = true; in get_memory_error_data()
2269 pvt->is_cur_addr_mirrored = false; in get_memory_error_data()
2272 if (pvt->is_lockstep) in get_memory_error_data()
2307 pci_read_config_dword(pvt->pci_tad[base_ch], rir_way_limit[n_rir], &reg); in get_memory_error_data()
2312 limit = pvt->info.rir_limit(reg); in get_memory_error_data()
2329 if (pvt->is_close_pg) in get_memory_error_data()
2335 pci_read_config_dword(pvt->pci_tad[base_ch], rir_offset[n_rir][idx], &reg); in get_memory_error_data()
2336 *rank = RIR_RNK_TGT(pvt->info.type, reg); in get_memory_error_data()
2338 if (pvt->info.type == BROADWELL) { in get_memory_error_data()
2339 if (pvt->is_close_pg) in get_memory_error_data()
2348 rank_addr -= RIR_OFFSET(pvt->info.type, reg); in get_memory_error_data()
2350 mtype = pvt->info.get_memory_type(pvt); in get_memory_error_data()
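get_memory_error_data() is the heart of the driver: it turns the physical address from a machine check into socket, home agent, channel, rank and rank offset by running the same decoders the layout dump uses, in order. A comment-level sketch of that pipeline as the matches trace it; error handling and the per-generation branches are left out, and only names visible above are used.

/* Decode pipeline of get_memory_error_data(), reduced to its stages. */

/* 1. Sanity: the address must not fall in the TOLM..4GiB hole and must
 *    be below TOHM. */
if ((addr > (u64)pvt->tolm && addr < (1LL << 32)) || addr >= (u64)pvt->tohm)
	return -EINVAL;

/* 2. SAD: scan pvt->info.dram_rule[] via pci_sad0 for the first enabled
 *    rule whose sad_limit() covers the address; its interleave list names
 *    the target socket / home agent. */

/* 3. TAD: within that socket, the target-address-decoder rules pick the
 *    channel (possibly hashed on Haswell/Broadwell when is_chan_hash is
 *    set) and a channel offset, honouring mirroring and lockstep. */

/* 4. RIR: the rank-interleave registers behind pci_tad[base_ch] map the
 *    channel address to a rank (RIR_RNK_TGT) and a rank-internal offset,
 *    undoing RIR_OFFSET(). */
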
2377 struct sbridge_pvt *pvt; in get_memory_error_data_from_mce() local
2386 pvt = mci->pvt_info; in get_memory_error_data_from_mce()
2387 if (!pvt->info.get_ha) { in get_memory_error_data_from_mce()
2391 *ha = pvt->info.get_ha(m->bank); in get_memory_error_data_from_mce()
2404 pvt = new_mci->pvt_info; in get_memory_error_data_from_mce()
2405 pci_ha = pvt->pci_ha; in get_memory_error_data_from_mce()
2410 if (pvt->mirror_mode == FULL_MIRRORING || in get_memory_error_data_from_mce()
2411 (pvt->mirror_mode == ADDR_RANGE_MIRRORING && tad0)) { in get_memory_error_data_from_mce()
2413 pvt->is_cur_addr_mirrored = true; in get_memory_error_data_from_mce()
2415 pvt->is_cur_addr_mirrored = false; in get_memory_error_data_from_mce()
2418 if (pvt->is_lockstep) in get_memory_error_data_from_mce()
2620 struct sbridge_pvt *pvt = mci->pvt_info; in sbridge_mci_bind_devs() local
2632 pvt->pci_sad0 = pdev; in sbridge_mci_bind_devs()
2635 pvt->pci_sad1 = pdev; in sbridge_mci_bind_devs()
2638 pvt->pci_br0 = pdev; in sbridge_mci_bind_devs()
2641 pvt->pci_ha = pdev; in sbridge_mci_bind_devs()
2644 pvt->pci_ta = pdev; in sbridge_mci_bind_devs()
2647 pvt->pci_ras = pdev; in sbridge_mci_bind_devs()
2655 pvt->pci_tad[id] = pdev; in sbridge_mci_bind_devs()
2660 pvt->pci_ddrio = pdev; in sbridge_mci_bind_devs()
2673 if (!pvt->pci_sad0 || !pvt->pci_sad1 || !pvt->pci_ha || in sbridge_mci_bind_devs()
2674 !pvt->pci_ras || !pvt->pci_ta) in sbridge_mci_bind_devs()
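All of the *_mci_bind_devs() variants follow the same pattern: walk the PCI devices that were discovered for this socket, stash each one in the matching pvt pointer keyed on its device ID, and finally verify that every pointer the decoders rely on was found. A generic sketch of that shape, assuming the per-socket device array (sbridge_dev->pdev[]/n_devs) the driver keeps; the PCI_DEVICE_ID_EXAMPLE_* names are placeholders, not the driver's.

/* Generic shape of the *_mci_bind_devs() helpers. */
static int example_mci_bind_devs(struct mem_ctl_info *mci,
				 struct sbridge_dev *sbridge_dev)
{
	struct sbridge_pvt *pvt = mci->pvt_info;
	struct pci_dev *pdev;
	int i;

	for (i = 0; i < sbridge_dev->n_devs; i++) {
		pdev = sbridge_dev->pdev[i];
		if (!pdev)
			continue;

		switch (pdev->device) {
		case PCI_DEVICE_ID_EXAMPLE_SAD0:
			pvt->pci_sad0 = pdev;
			break;
		case PCI_DEVICE_ID_EXAMPLE_TA:
			pvt->pci_ta = pdev;
			break;
		/* one case per PCI function the decoders need */
		}
	}

	/* Refuse to register the MC if any required device is missing. */
	if (!pvt->pci_sad0 || !pvt->pci_ta)
		return -ENODEV;

	return 0;
}
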
2694 struct sbridge_pvt *pvt = mci->pvt_info; in ibridge_mci_bind_devs() local
2707 pvt->pci_ha = pdev; in ibridge_mci_bind_devs()
2711 pvt->pci_ta = pdev; in ibridge_mci_bind_devs()
2715 pvt->pci_ras = pdev; in ibridge_mci_bind_devs()
2727 pvt->pci_tad[id] = pdev; in ibridge_mci_bind_devs()
2732 pvt->pci_ddrio = pdev; in ibridge_mci_bind_devs()
2735 pvt->pci_ddrio = pdev; in ibridge_mci_bind_devs()
2738 pvt->pci_sad0 = pdev; in ibridge_mci_bind_devs()
2741 pvt->pci_br0 = pdev; in ibridge_mci_bind_devs()
2744 pvt->pci_br1 = pdev; in ibridge_mci_bind_devs()
2757 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_br0 || in ibridge_mci_bind_devs()
2758 !pvt->pci_br1 || !pvt->pci_ras || !pvt->pci_ta) in ibridge_mci_bind_devs()
2780 struct sbridge_pvt *pvt = mci->pvt_info; in haswell_mci_bind_devs() local
2786 if (pvt->info.pci_vtd == NULL) in haswell_mci_bind_devs()
2788 pvt->info.pci_vtd = pci_get_device(PCI_VENDOR_ID_INTEL, in haswell_mci_bind_devs()
2799 pvt->pci_sad0 = pdev; in haswell_mci_bind_devs()
2802 pvt->pci_sad1 = pdev; in haswell_mci_bind_devs()
2806 pvt->pci_ha = pdev; in haswell_mci_bind_devs()
2810 pvt->pci_ta = pdev; in haswell_mci_bind_devs()
2814 pvt->pci_ras = pdev; in haswell_mci_bind_devs()
2826 pvt->pci_tad[id] = pdev; in haswell_mci_bind_devs()
2834 if (!pvt->pci_ddrio) in haswell_mci_bind_devs()
2835 pvt->pci_ddrio = pdev; in haswell_mci_bind_devs()
2848 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_sad1 || in haswell_mci_bind_devs()
2849 !pvt->pci_ras || !pvt->pci_ta || !pvt->info.pci_vtd) in haswell_mci_bind_devs()
2865 struct sbridge_pvt *pvt = mci->pvt_info; in broadwell_mci_bind_devs() local
2871 if (pvt->info.pci_vtd == NULL) in broadwell_mci_bind_devs()
2873 pvt->info.pci_vtd = pci_get_device(PCI_VENDOR_ID_INTEL, in broadwell_mci_bind_devs()
2884 pvt->pci_sad0 = pdev; in broadwell_mci_bind_devs()
2887 pvt->pci_sad1 = pdev; in broadwell_mci_bind_devs()
2891 pvt->pci_ha = pdev; in broadwell_mci_bind_devs()
2895 pvt->pci_ta = pdev; in broadwell_mci_bind_devs()
2899 pvt->pci_ras = pdev; in broadwell_mci_bind_devs()
2911 pvt->pci_tad[id] = pdev; in broadwell_mci_bind_devs()
2916 pvt->pci_ddrio = pdev; in broadwell_mci_bind_devs()
2929 if (!pvt->pci_sad0 || !pvt->pci_ha || !pvt->pci_sad1 || in broadwell_mci_bind_devs()
2930 !pvt->pci_ras || !pvt->pci_ta || !pvt->info.pci_vtd) in broadwell_mci_bind_devs()
2946 struct sbridge_pvt *pvt = mci->pvt_info; in knl_mci_bind_devs() local
2965 pvt->knl.pci_mc0 = pdev; in knl_mci_bind_devs()
2967 pvt->knl.pci_mc1 = pdev; in knl_mci_bind_devs()
2977 pvt->pci_sad0 = pdev; in knl_mci_bind_devs()
2981 pvt->pci_sad1 = pdev; in knl_mci_bind_devs()
2997 WARN_ON(pvt->knl.pci_cha[devidx] != NULL); in knl_mci_bind_devs()
2999 pvt->knl.pci_cha[devidx] = pdev; in knl_mci_bind_devs()
3022 WARN_ON(pvt->knl.pci_channel[devidx] != NULL); in knl_mci_bind_devs()
3023 pvt->knl.pci_channel[devidx] = pdev; in knl_mci_bind_devs()
3027 pvt->knl.pci_mc_info = pdev; in knl_mci_bind_devs()
3031 pvt->pci_ta = pdev; in knl_mci_bind_devs()
3041 if (!pvt->knl.pci_mc0 || !pvt->knl.pci_mc1 || in knl_mci_bind_devs()
3042 !pvt->pci_sad0 || !pvt->pci_sad1 || in knl_mci_bind_devs()
3043 !pvt->pci_ta) { in knl_mci_bind_devs()
3048 if (!pvt->knl.pci_channel[i]) { in knl_mci_bind_devs()
3055 if (!pvt->knl.pci_cha[i]) { in knl_mci_bind_devs()
3082 struct sbridge_pvt *pvt = mci->pvt_info; in sbridge_mce_output_error() local
3104 if (pvt->info.type != SANDY_BRIDGE) in sbridge_mce_output_error()
3152 if (pvt->info.type == KNIGHTS_LANDING) { in sbridge_mce_output_error()
3201 pvt = mci->pvt_info; in sbridge_mce_output_error()
3220 if (!pvt->is_lockstep && !pvt->is_cur_addr_mirrored && !pvt->is_close_pg) in sbridge_mce_output_error()
3352 struct sbridge_pvt *pvt; in sbridge_register_mci() local
3365 sizeof(*pvt)); in sbridge_register_mci()
3373 pvt = mci->pvt_info; in sbridge_register_mci()
3374 memset(pvt, 0, sizeof(*pvt)); in sbridge_register_mci()
3377 pvt->sbridge_dev = sbridge_dev; in sbridge_register_mci()
3388 pvt->info.type = type; in sbridge_register_mci()
3391 pvt->info.rankcfgr = IB_RANK_CFG_A; in sbridge_register_mci()
3392 pvt->info.get_tolm = ibridge_get_tolm; in sbridge_register_mci()
3393 pvt->info.get_tohm = ibridge_get_tohm; in sbridge_register_mci()
3394 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3395 pvt->info.get_memory_type = get_memory_type; in sbridge_register_mci()
3396 pvt->info.get_node_id = get_node_id; in sbridge_register_mci()
3397 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3398 pvt->info.rir_limit = rir_limit; in sbridge_register_mci()
3399 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3400 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3401 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3402 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3403 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3404 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3405 pvt->info.get_width = ibridge_get_width; in sbridge_register_mci()
3413 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3416 pvt->info.rankcfgr = SB_RANK_CFG_A; in sbridge_register_mci()
3417 pvt->info.get_tolm = sbridge_get_tolm; in sbridge_register_mci()
3418 pvt->info.get_tohm = sbridge_get_tohm; in sbridge_register_mci()
3419 pvt->info.dram_rule = sbridge_dram_rule; in sbridge_register_mci()
3420 pvt->info.get_memory_type = get_memory_type; in sbridge_register_mci()
3421 pvt->info.get_node_id = get_node_id; in sbridge_register_mci()
3422 pvt->info.get_ha = sbridge_get_ha; in sbridge_register_mci()
3423 pvt->info.rir_limit = rir_limit; in sbridge_register_mci()
3424 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3425 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3426 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3427 pvt->info.max_sad = ARRAY_SIZE(sbridge_dram_rule); in sbridge_register_mci()
3428 pvt->info.interleave_list = sbridge_interleave_list; in sbridge_register_mci()
3429 pvt->info.interleave_pkg = sbridge_interleave_pkg; in sbridge_register_mci()
3430 pvt->info.get_width = sbridge_get_width; in sbridge_register_mci()
3438 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3442 pvt->info.get_tolm = haswell_get_tolm; in sbridge_register_mci()
3443 pvt->info.get_tohm = haswell_get_tohm; in sbridge_register_mci()
3444 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3445 pvt->info.get_memory_type = haswell_get_memory_type; in sbridge_register_mci()
3446 pvt->info.get_node_id = haswell_get_node_id; in sbridge_register_mci()
3447 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3448 pvt->info.rir_limit = haswell_rir_limit; in sbridge_register_mci()
3449 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3450 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3451 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3452 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3453 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3454 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3455 pvt->info.get_width = ibridge_get_width; in sbridge_register_mci()
3463 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3467 pvt->info.get_tolm = haswell_get_tolm; in sbridge_register_mci()
3468 pvt->info.get_tohm = haswell_get_tohm; in sbridge_register_mci()
3469 pvt->info.dram_rule = ibridge_dram_rule; in sbridge_register_mci()
3470 pvt->info.get_memory_type = haswell_get_memory_type; in sbridge_register_mci()
3471 pvt->info.get_node_id = haswell_get_node_id; in sbridge_register_mci()
3472 pvt->info.get_ha = ibridge_get_ha; in sbridge_register_mci()
3473 pvt->info.rir_limit = haswell_rir_limit; in sbridge_register_mci()
3474 pvt->info.sad_limit = sad_limit; in sbridge_register_mci()
3475 pvt->info.interleave_mode = interleave_mode; in sbridge_register_mci()
3476 pvt->info.dram_attr = dram_attr; in sbridge_register_mci()
3477 pvt->info.max_sad = ARRAY_SIZE(ibridge_dram_rule); in sbridge_register_mci()
3478 pvt->info.interleave_list = ibridge_interleave_list; in sbridge_register_mci()
3479 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3480 pvt->info.get_width = broadwell_get_width; in sbridge_register_mci()
3488 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
3491 /* pvt->info.rankcfgr == ??? */ in sbridge_register_mci()
3492 pvt->info.get_tolm = knl_get_tolm; in sbridge_register_mci()
3493 pvt->info.get_tohm = knl_get_tohm; in sbridge_register_mci()
3494 pvt->info.dram_rule = knl_dram_rule; in sbridge_register_mci()
3495 pvt->info.get_memory_type = knl_get_memory_type; in sbridge_register_mci()
3496 pvt->info.get_node_id = knl_get_node_id; in sbridge_register_mci()
3497 pvt->info.get_ha = knl_get_ha; in sbridge_register_mci()
3498 pvt->info.rir_limit = NULL; in sbridge_register_mci()
3499 pvt->info.sad_limit = knl_sad_limit; in sbridge_register_mci()
3500 pvt->info.interleave_mode = knl_interleave_mode; in sbridge_register_mci()
3501 pvt->info.dram_attr = dram_attr_knl; in sbridge_register_mci()
3502 pvt->info.max_sad = ARRAY_SIZE(knl_dram_rule); in sbridge_register_mci()
3503 pvt->info.interleave_list = knl_interleave_list; in sbridge_register_mci()
3504 pvt->info.interleave_pkg = ibridge_interleave_pkg; in sbridge_register_mci()
3505 pvt->info.get_width = knl_get_width; in sbridge_register_mci()
3512 pvt->sbridge_dev->source_id, pvt->sbridge_dev->dom); in sbridge_register_mci()
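sbridge_register_mci() is where the per-generation behavior is selected: a switch on the detected type fills pvt->info with the right register readers, DRAM-rule tables and limits, after which the rest of the driver only works through those pointers. A trimmed sketch of one arm of that switch, mirroring the Ivy Bridge assignments listed above; the other arms follow the same shape with their own readers and tables.

/* Trimmed sketch of the ops wiring in sbridge_register_mci(); only the
 * IVY_BRIDGE arm is shown, reduced to assignments visible above. */
pvt->info.type = type;
switch (type) {
case IVY_BRIDGE:
	pvt->info.rankcfgr	  = IB_RANK_CFG_A;
	pvt->info.get_tolm	  = ibridge_get_tolm;
	pvt->info.get_tohm	  = ibridge_get_tohm;
	pvt->info.dram_rule	  = ibridge_dram_rule;
	pvt->info.get_memory_type = get_memory_type;
	pvt->info.get_node_id	  = get_node_id;
	pvt->info.get_width	  = ibridge_get_width;
	pvt->info.max_sad	  = ARRAY_SIZE(ibridge_dram_rule);
	pvt->info.interleave_list = ibridge_interleave_list;
	pvt->info.interleave_pkg  = ibridge_interleave_pkg;
	break;
/* SANDY_BRIDGE, HASWELL, BROADWELL and KNIGHTS_LANDING arms omitted. */
default:
	return -EINVAL;
}
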