This source file includes following definitions.
- bcma_core_pcie2_cfg_read
- bcma_core_pcie2_cfg_write
- bcma_core_pcie2_war_delay_perst_enab
- bcma_core_pcie2_set_ltr_vals
- bcma_core_pcie2_hw_ltr_war
- pciedev_crwlpciegen2
- pciedev_crwlpciegen2_180
- pciedev_crwlpciegen2_182
- pciedev_reg_pm_clk_period
- bcma_core_pcie2_init
- bcma_core_pcie2_up
1
2
3
4
5
6
7
8
9
10
11 #include "bcma_private.h"
12 #include <linux/bcma/bcma.h>
13 #include <linux/pci.h>
14
15
16
17
18
/* Read a register through the indirect config window: latch the offset into
 * CONFIGINDADDR, then fetch from CONFIGINDDATA.  Compiled out via #if 0 —
 * no current callers, presumably kept for future use/debugging.
 */
#if 0
static u32 bcma_core_pcie2_cfg_read(struct bcma_drv_pcie2 *pcie2, u32 addr)
{
	pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, addr);
	/* Dummy read-back of the address register before sampling the data
	 * register — NOTE(review): looks like a write-flush; confirm against
	 * the core's programming guide. */
	pcie2_read32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR);
	return pcie2_read32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA);
}
#endif
27
/* Write @val to indirect config register @addr: the offset must be latched
 * into CONFIGINDADDR before the data write, so the statement order here is
 * load-bearing.
 */
static void bcma_core_pcie2_cfg_write(struct bcma_drv_pcie2 *pcie2, u32 addr,
				      u32 val)
{
	pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, addr);
	pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, val);
}
34
35
36
37
38
39 static u32 bcma_core_pcie2_war_delay_perst_enab(struct bcma_drv_pcie2 *pcie2,
40 bool enable)
41 {
42 u32 val;
43
44
45 val = pcie2_read32(pcie2, BCMA_CORE_PCIE2_CLK_CONTROL);
46 val |= PCIE2_CLKC_DLYPERST;
47 val &= ~PCIE2_CLKC_DISSPROMLD;
48 if (enable) {
49 val &= ~PCIE2_CLKC_DLYPERST;
50 val |= PCIE2_CLKC_DISSPROMLD;
51 }
52 pcie2_write32(pcie2, (BCMA_CORE_PCIE2_CLK_CONTROL), val);
53
54 return pcie2_read32(pcie2, BCMA_CORE_PCIE2_CLK_CONTROL);
55 }
56
57 static void bcma_core_pcie2_set_ltr_vals(struct bcma_drv_pcie2 *pcie2)
58 {
59
60 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, 0x844);
61 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, 0x883c883c);
62
63 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, 0x848);
64 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, 0x88648864);
65
66 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, 0x84C);
67 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, 0x90039003);
68 }
69
70 static void bcma_core_pcie2_hw_ltr_war(struct bcma_drv_pcie2 *pcie2)
71 {
72 u8 core_rev = pcie2->core->id.rev;
73 u32 devstsctr2;
74
75 if (core_rev < 2 || core_rev == 10 || core_rev > 13)
76 return;
77
78 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR,
79 PCIE2_CAP_DEVSTSCTRL2_OFFSET);
80 devstsctr2 = pcie2_read32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA);
81 if (devstsctr2 & PCIE2_CAP_DEVSTSCTRL2_LTRENAB) {
82
83 bcma_core_pcie2_set_ltr_vals(pcie2);
84
85
86
87
88
89
90 devstsctr2 |= PCIE2_CAP_DEVSTSCTRL2_LTRENAB;
91 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR,
92 PCIE2_CAP_DEVSTSCTRL2_OFFSET);
93 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, devstsctr2);
94
95
96 pcie2_write32(pcie2, BCMA_CORE_PCIE2_LTR_STATE,
97 PCIE2_LTR_ACTIVE);
98 usleep_range(1000, 2000);
99
100
101 pcie2_write32(pcie2, BCMA_CORE_PCIE2_LTR_STATE,
102 PCIE2_LTR_SLEEP);
103 usleep_range(1000, 2000);
104 }
105 }
106
107 static void pciedev_crwlpciegen2(struct bcma_drv_pcie2 *pcie2)
108 {
109 u8 core_rev = pcie2->core->id.rev;
110 bool pciewar160, pciewar162;
111
112 pciewar160 = core_rev == 7 || core_rev == 9 || core_rev == 11;
113 pciewar162 = core_rev == 5 || core_rev == 7 || core_rev == 8 ||
114 core_rev == 9 || core_rev == 11;
115
116 if (!pciewar160 && !pciewar162)
117 return;
118
119
120 #if 0
121 pcie2_set32(pcie2, BCMA_CORE_PCIE2_CLK_CONTROL,
122 PCIE_DISABLE_L1CLK_GATING);
123 #if 0
124 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR,
125 PCIEGEN2_COE_PVT_TL_CTRL_0);
126 pcie2_mask32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA,
127 ~(1 << COE_PVT_TL_CTRL_0_PM_DIS_L1_REENTRY_BIT));
128 #endif
129 #endif
130 }
131
/* Workaround: OR 0x1f into the PMCR_REFUP indirect config register
 * (address latched first, then a read-modify-write via pcie2_set32).
 */
static void pciedev_crwlpciegen2_180(struct bcma_drv_pcie2 *pcie2)
{
	pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, PCIE2_PMCR_REFUP);
	pcie2_set32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, 0x1f);
}
137
/* Workaround: write bit 0 of the SBMBX (sideband mailbox) indirect
 * config register.
 */
static void pciedev_crwlpciegen2_182(struct bcma_drv_pcie2 *pcie2)
{
	pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR, PCIE2_SBMBX);
	pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, 1 << 0);
}
143
144 static void pciedev_reg_pm_clk_period(struct bcma_drv_pcie2 *pcie2)
145 {
146 struct bcma_drv_cc *drv_cc = &pcie2->core->bus->drv_cc;
147 u8 core_rev = pcie2->core->id.rev;
148 u32 alp_khz, pm_value;
149
150 if (core_rev <= 13) {
151 alp_khz = bcma_pmu_get_alp_clock(drv_cc) / 1000;
152 pm_value = (1000000 * 2) / alp_khz;
153 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDADDR,
154 PCIE2_PVT_REG_PM_CLK_PERIOD);
155 pcie2_write32(pcie2, BCMA_CORE_PCIE2_CONFIGINDDATA, pm_value);
156 }
157 }
158
159 void bcma_core_pcie2_init(struct bcma_drv_pcie2 *pcie2)
160 {
161 struct bcma_bus *bus = pcie2->core->bus;
162 struct bcma_chipinfo *ci = &bus->chipinfo;
163 u32 tmp;
164
165 tmp = pcie2_read32(pcie2, BCMA_CORE_PCIE2_SPROM(54));
166 if ((tmp & 0xe) >> 1 == 2)
167 bcma_core_pcie2_cfg_write(pcie2, 0x4e0, 0x17);
168
169 switch (bus->chipinfo.id) {
170 case BCMA_CHIP_ID_BCM4360:
171 case BCMA_CHIP_ID_BCM4352:
172 pcie2->reqsize = 1024;
173 break;
174 default:
175 pcie2->reqsize = 128;
176 break;
177 }
178
179 if (ci->id == BCMA_CHIP_ID_BCM4360 && ci->rev > 3)
180 bcma_core_pcie2_war_delay_perst_enab(pcie2, true);
181 bcma_core_pcie2_hw_ltr_war(pcie2);
182 pciedev_crwlpciegen2(pcie2);
183 pciedev_reg_pm_clk_period(pcie2);
184 pciedev_crwlpciegen2_180(pcie2);
185 pciedev_crwlpciegen2_182(pcie2);
186 }
187
188
189
190
191
192 void bcma_core_pcie2_up(struct bcma_drv_pcie2 *pcie2)
193 {
194 struct bcma_bus *bus = pcie2->core->bus;
195 struct pci_dev *dev = bus->host_pci;
196 int err;
197
198 err = pcie_set_readrq(dev, pcie2->reqsize);
199 if (err)
200 bcma_err(bus, "Error setting PCI_EXP_DEVCTL_READRQ: %d\n", err);
201 }