/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 *
 * $FreeBSD: head/sys/dev/drm2/radeon/evergreen.c 254885 2013-08-25 19:37:15Z dumbbell $
 */
29 #include "radeon_asic.h"
30 #include <uapi_drm/radeon_drm.h>
31 #include "evergreend.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
37 #define EVERGREEN_PFP_UCODE_SIZE 1120
38 #define EVERGREEN_PM4_UCODE_SIZE 1376
40 static const u32 crtc_offsets[6] =
42 EVERGREEN_CRTC0_REGISTER_OFFSET,
43 EVERGREEN_CRTC1_REGISTER_OFFSET,
44 EVERGREEN_CRTC2_REGISTER_OFFSET,
45 EVERGREEN_CRTC3_REGISTER_OFFSET,
46 EVERGREEN_CRTC4_REGISTER_OFFSET,
47 EVERGREEN_CRTC5_REGISTER_OFFSET
50 static void evergreen_gpu_init(struct radeon_device *rdev);
51 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
52 void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
53 bool evergreen_is_display_hung(struct radeon_device *rdev);
55 static const u32 evergreen_golden_registers[] =
57 0x3f90, 0xffff0000, 0xff000000,
58 0x9148, 0xffff0000, 0xff000000,
59 0x3f94, 0xffff0000, 0xff000000,
60 0x914c, 0xffff0000, 0xff000000,
61 0x9b7c, 0xffffffff, 0x00000000,
62 0x8a14, 0xffffffff, 0x00000007,
63 0x8b10, 0xffffffff, 0x00000000,
64 0x960c, 0xffffffff, 0x54763210,
65 0x88c4, 0xffffffff, 0x000000c2,
66 0x88d4, 0xffffffff, 0x00000010,
67 0x8974, 0xffffffff, 0x00000000,
68 0xc78, 0x00000080, 0x00000080,
69 0x5eb4, 0xffffffff, 0x00000002,
70 0x5e78, 0xffffffff, 0x001000f0,
71 0x6104, 0x01000300, 0x00000000,
72 0x5bc0, 0x00300000, 0x00000000,
73 0x7030, 0xffffffff, 0x00000011,
74 0x7c30, 0xffffffff, 0x00000011,
75 0x10830, 0xffffffff, 0x00000011,
76 0x11430, 0xffffffff, 0x00000011,
77 0x12030, 0xffffffff, 0x00000011,
78 0x12c30, 0xffffffff, 0x00000011,
79 0xd02c, 0xffffffff, 0x08421000,
80 0x240c, 0xffffffff, 0x00000380,
81 0x8b24, 0xffffffff, 0x00ff0fff,
82 0x28a4c, 0x06000000, 0x06000000,
83 0x10c, 0x00000001, 0x00000001,
84 0x8d00, 0xffffffff, 0x100e4848,
85 0x8d04, 0xffffffff, 0x00164745,
86 0x8c00, 0xffffffff, 0xe4000003,
87 0x8c04, 0xffffffff, 0x40600060,
88 0x8c08, 0xffffffff, 0x001c001c,
89 0x8cf0, 0xffffffff, 0x08e00620,
90 0x8c20, 0xffffffff, 0x00800080,
91 0x8c24, 0xffffffff, 0x00800080,
92 0x8c18, 0xffffffff, 0x20202078,
93 0x8c1c, 0xffffffff, 0x00001010,
94 0x28350, 0xffffffff, 0x00000000,
95 0xa008, 0xffffffff, 0x00010000,
96 0x5cc, 0xffffffff, 0x00000001,
97 0x9508, 0xffffffff, 0x00000002,
98 0x913c, 0x0000000f, 0x0000000a
101 static const u32 evergreen_golden_registers2[] =
103 0x2f4c, 0xffffffff, 0x00000000,
104 0x54f4, 0xffffffff, 0x00000000,
105 0x54f0, 0xffffffff, 0x00000000,
106 0x5498, 0xffffffff, 0x00000000,
107 0x549c, 0xffffffff, 0x00000000,
108 0x5494, 0xffffffff, 0x00000000,
109 0x53cc, 0xffffffff, 0x00000000,
110 0x53c8, 0xffffffff, 0x00000000,
111 0x53c4, 0xffffffff, 0x00000000,
112 0x53c0, 0xffffffff, 0x00000000,
113 0x53bc, 0xffffffff, 0x00000000,
114 0x53b8, 0xffffffff, 0x00000000,
115 0x53b4, 0xffffffff, 0x00000000,
116 0x53b0, 0xffffffff, 0x00000000
119 static const u32 cypress_mgcg_init[] =
121 0x802c, 0xffffffff, 0xc0000000,
122 0x5448, 0xffffffff, 0x00000100,
123 0x55e4, 0xffffffff, 0x00000100,
124 0x160c, 0xffffffff, 0x00000100,
125 0x5644, 0xffffffff, 0x00000100,
126 0xc164, 0xffffffff, 0x00000100,
127 0x8a18, 0xffffffff, 0x00000100,
128 0x897c, 0xffffffff, 0x06000100,
129 0x8b28, 0xffffffff, 0x00000100,
130 0x9144, 0xffffffff, 0x00000100,
131 0x9a60, 0xffffffff, 0x00000100,
132 0x9868, 0xffffffff, 0x00000100,
133 0x8d58, 0xffffffff, 0x00000100,
134 0x9510, 0xffffffff, 0x00000100,
135 0x949c, 0xffffffff, 0x00000100,
136 0x9654, 0xffffffff, 0x00000100,
137 0x9030, 0xffffffff, 0x00000100,
138 0x9034, 0xffffffff, 0x00000100,
139 0x9038, 0xffffffff, 0x00000100,
140 0x903c, 0xffffffff, 0x00000100,
141 0x9040, 0xffffffff, 0x00000100,
142 0xa200, 0xffffffff, 0x00000100,
143 0xa204, 0xffffffff, 0x00000100,
144 0xa208, 0xffffffff, 0x00000100,
145 0xa20c, 0xffffffff, 0x00000100,
146 0x971c, 0xffffffff, 0x00000100,
147 0x977c, 0xffffffff, 0x00000100,
148 0x3f80, 0xffffffff, 0x00000100,
149 0xa210, 0xffffffff, 0x00000100,
150 0xa214, 0xffffffff, 0x00000100,
151 0x4d8, 0xffffffff, 0x00000100,
152 0x9784, 0xffffffff, 0x00000100,
153 0x9698, 0xffffffff, 0x00000100,
154 0x4d4, 0xffffffff, 0x00000200,
155 0x30cc, 0xffffffff, 0x00000100,
156 0xd0c0, 0xffffffff, 0xff000100,
157 0x802c, 0xffffffff, 0x40000000,
158 0x915c, 0xffffffff, 0x00010000,
159 0x9160, 0xffffffff, 0x00030002,
160 0x9178, 0xffffffff, 0x00070000,
161 0x917c, 0xffffffff, 0x00030002,
162 0x9180, 0xffffffff, 0x00050004,
163 0x918c, 0xffffffff, 0x00010006,
164 0x9190, 0xffffffff, 0x00090008,
165 0x9194, 0xffffffff, 0x00070000,
166 0x9198, 0xffffffff, 0x00030002,
167 0x919c, 0xffffffff, 0x00050004,
168 0x91a8, 0xffffffff, 0x00010006,
169 0x91ac, 0xffffffff, 0x00090008,
170 0x91b0, 0xffffffff, 0x00070000,
171 0x91b4, 0xffffffff, 0x00030002,
172 0x91b8, 0xffffffff, 0x00050004,
173 0x91c4, 0xffffffff, 0x00010006,
174 0x91c8, 0xffffffff, 0x00090008,
175 0x91cc, 0xffffffff, 0x00070000,
176 0x91d0, 0xffffffff, 0x00030002,
177 0x91d4, 0xffffffff, 0x00050004,
178 0x91e0, 0xffffffff, 0x00010006,
179 0x91e4, 0xffffffff, 0x00090008,
180 0x91e8, 0xffffffff, 0x00000000,
181 0x91ec, 0xffffffff, 0x00070000,
182 0x91f0, 0xffffffff, 0x00030002,
183 0x91f4, 0xffffffff, 0x00050004,
184 0x9200, 0xffffffff, 0x00010006,
185 0x9204, 0xffffffff, 0x00090008,
186 0x9208, 0xffffffff, 0x00070000,
187 0x920c, 0xffffffff, 0x00030002,
188 0x9210, 0xffffffff, 0x00050004,
189 0x921c, 0xffffffff, 0x00010006,
190 0x9220, 0xffffffff, 0x00090008,
191 0x9224, 0xffffffff, 0x00070000,
192 0x9228, 0xffffffff, 0x00030002,
193 0x922c, 0xffffffff, 0x00050004,
194 0x9238, 0xffffffff, 0x00010006,
195 0x923c, 0xffffffff, 0x00090008,
196 0x9240, 0xffffffff, 0x00070000,
197 0x9244, 0xffffffff, 0x00030002,
198 0x9248, 0xffffffff, 0x00050004,
199 0x9254, 0xffffffff, 0x00010006,
200 0x9258, 0xffffffff, 0x00090008,
201 0x925c, 0xffffffff, 0x00070000,
202 0x9260, 0xffffffff, 0x00030002,
203 0x9264, 0xffffffff, 0x00050004,
204 0x9270, 0xffffffff, 0x00010006,
205 0x9274, 0xffffffff, 0x00090008,
206 0x9278, 0xffffffff, 0x00070000,
207 0x927c, 0xffffffff, 0x00030002,
208 0x9280, 0xffffffff, 0x00050004,
209 0x928c, 0xffffffff, 0x00010006,
210 0x9290, 0xffffffff, 0x00090008,
211 0x9294, 0xffffffff, 0x00000000,
212 0x929c, 0xffffffff, 0x00000001,
213 0x802c, 0xffffffff, 0x40010000,
214 0x915c, 0xffffffff, 0x00010000,
215 0x9160, 0xffffffff, 0x00030002,
216 0x9178, 0xffffffff, 0x00070000,
217 0x917c, 0xffffffff, 0x00030002,
218 0x9180, 0xffffffff, 0x00050004,
219 0x918c, 0xffffffff, 0x00010006,
220 0x9190, 0xffffffff, 0x00090008,
221 0x9194, 0xffffffff, 0x00070000,
222 0x9198, 0xffffffff, 0x00030002,
223 0x919c, 0xffffffff, 0x00050004,
224 0x91a8, 0xffffffff, 0x00010006,
225 0x91ac, 0xffffffff, 0x00090008,
226 0x91b0, 0xffffffff, 0x00070000,
227 0x91b4, 0xffffffff, 0x00030002,
228 0x91b8, 0xffffffff, 0x00050004,
229 0x91c4, 0xffffffff, 0x00010006,
230 0x91c8, 0xffffffff, 0x00090008,
231 0x91cc, 0xffffffff, 0x00070000,
232 0x91d0, 0xffffffff, 0x00030002,
233 0x91d4, 0xffffffff, 0x00050004,
234 0x91e0, 0xffffffff, 0x00010006,
235 0x91e4, 0xffffffff, 0x00090008,
236 0x91e8, 0xffffffff, 0x00000000,
237 0x91ec, 0xffffffff, 0x00070000,
238 0x91f0, 0xffffffff, 0x00030002,
239 0x91f4, 0xffffffff, 0x00050004,
240 0x9200, 0xffffffff, 0x00010006,
241 0x9204, 0xffffffff, 0x00090008,
242 0x9208, 0xffffffff, 0x00070000,
243 0x920c, 0xffffffff, 0x00030002,
244 0x9210, 0xffffffff, 0x00050004,
245 0x921c, 0xffffffff, 0x00010006,
246 0x9220, 0xffffffff, 0x00090008,
247 0x9224, 0xffffffff, 0x00070000,
248 0x9228, 0xffffffff, 0x00030002,
249 0x922c, 0xffffffff, 0x00050004,
250 0x9238, 0xffffffff, 0x00010006,
251 0x923c, 0xffffffff, 0x00090008,
252 0x9240, 0xffffffff, 0x00070000,
253 0x9244, 0xffffffff, 0x00030002,
254 0x9248, 0xffffffff, 0x00050004,
255 0x9254, 0xffffffff, 0x00010006,
256 0x9258, 0xffffffff, 0x00090008,
257 0x925c, 0xffffffff, 0x00070000,
258 0x9260, 0xffffffff, 0x00030002,
259 0x9264, 0xffffffff, 0x00050004,
260 0x9270, 0xffffffff, 0x00010006,
261 0x9274, 0xffffffff, 0x00090008,
262 0x9278, 0xffffffff, 0x00070000,
263 0x927c, 0xffffffff, 0x00030002,
264 0x9280, 0xffffffff, 0x00050004,
265 0x928c, 0xffffffff, 0x00010006,
266 0x9290, 0xffffffff, 0x00090008,
267 0x9294, 0xffffffff, 0x00000000,
268 0x929c, 0xffffffff, 0x00000001,
269 0x802c, 0xffffffff, 0xc0000000
272 static const u32 redwood_mgcg_init[] =
274 0x802c, 0xffffffff, 0xc0000000,
275 0x5448, 0xffffffff, 0x00000100,
276 0x55e4, 0xffffffff, 0x00000100,
277 0x160c, 0xffffffff, 0x00000100,
278 0x5644, 0xffffffff, 0x00000100,
279 0xc164, 0xffffffff, 0x00000100,
280 0x8a18, 0xffffffff, 0x00000100,
281 0x897c, 0xffffffff, 0x06000100,
282 0x8b28, 0xffffffff, 0x00000100,
283 0x9144, 0xffffffff, 0x00000100,
284 0x9a60, 0xffffffff, 0x00000100,
285 0x9868, 0xffffffff, 0x00000100,
286 0x8d58, 0xffffffff, 0x00000100,
287 0x9510, 0xffffffff, 0x00000100,
288 0x949c, 0xffffffff, 0x00000100,
289 0x9654, 0xffffffff, 0x00000100,
290 0x9030, 0xffffffff, 0x00000100,
291 0x9034, 0xffffffff, 0x00000100,
292 0x9038, 0xffffffff, 0x00000100,
293 0x903c, 0xffffffff, 0x00000100,
294 0x9040, 0xffffffff, 0x00000100,
295 0xa200, 0xffffffff, 0x00000100,
296 0xa204, 0xffffffff, 0x00000100,
297 0xa208, 0xffffffff, 0x00000100,
298 0xa20c, 0xffffffff, 0x00000100,
299 0x971c, 0xffffffff, 0x00000100,
300 0x977c, 0xffffffff, 0x00000100,
301 0x3f80, 0xffffffff, 0x00000100,
302 0xa210, 0xffffffff, 0x00000100,
303 0xa214, 0xffffffff, 0x00000100,
304 0x4d8, 0xffffffff, 0x00000100,
305 0x9784, 0xffffffff, 0x00000100,
306 0x9698, 0xffffffff, 0x00000100,
307 0x4d4, 0xffffffff, 0x00000200,
308 0x30cc, 0xffffffff, 0x00000100,
309 0xd0c0, 0xffffffff, 0xff000100,
310 0x802c, 0xffffffff, 0x40000000,
311 0x915c, 0xffffffff, 0x00010000,
312 0x9160, 0xffffffff, 0x00030002,
313 0x9178, 0xffffffff, 0x00070000,
314 0x917c, 0xffffffff, 0x00030002,
315 0x9180, 0xffffffff, 0x00050004,
316 0x918c, 0xffffffff, 0x00010006,
317 0x9190, 0xffffffff, 0x00090008,
318 0x9194, 0xffffffff, 0x00070000,
319 0x9198, 0xffffffff, 0x00030002,
320 0x919c, 0xffffffff, 0x00050004,
321 0x91a8, 0xffffffff, 0x00010006,
322 0x91ac, 0xffffffff, 0x00090008,
323 0x91b0, 0xffffffff, 0x00070000,
324 0x91b4, 0xffffffff, 0x00030002,
325 0x91b8, 0xffffffff, 0x00050004,
326 0x91c4, 0xffffffff, 0x00010006,
327 0x91c8, 0xffffffff, 0x00090008,
328 0x91cc, 0xffffffff, 0x00070000,
329 0x91d0, 0xffffffff, 0x00030002,
330 0x91d4, 0xffffffff, 0x00050004,
331 0x91e0, 0xffffffff, 0x00010006,
332 0x91e4, 0xffffffff, 0x00090008,
333 0x91e8, 0xffffffff, 0x00000000,
334 0x91ec, 0xffffffff, 0x00070000,
335 0x91f0, 0xffffffff, 0x00030002,
336 0x91f4, 0xffffffff, 0x00050004,
337 0x9200, 0xffffffff, 0x00010006,
338 0x9204, 0xffffffff, 0x00090008,
339 0x9294, 0xffffffff, 0x00000000,
340 0x929c, 0xffffffff, 0x00000001,
341 0x802c, 0xffffffff, 0xc0000000
344 static const u32 cedar_golden_registers[] =
346 0x3f90, 0xffff0000, 0xff000000,
347 0x9148, 0xffff0000, 0xff000000,
348 0x3f94, 0xffff0000, 0xff000000,
349 0x914c, 0xffff0000, 0xff000000,
350 0x9b7c, 0xffffffff, 0x00000000,
351 0x8a14, 0xffffffff, 0x00000007,
352 0x8b10, 0xffffffff, 0x00000000,
353 0x960c, 0xffffffff, 0x54763210,
354 0x88c4, 0xffffffff, 0x000000c2,
355 0x88d4, 0xffffffff, 0x00000000,
356 0x8974, 0xffffffff, 0x00000000,
357 0xc78, 0x00000080, 0x00000080,
358 0x5eb4, 0xffffffff, 0x00000002,
359 0x5e78, 0xffffffff, 0x001000f0,
360 0x6104, 0x01000300, 0x00000000,
361 0x5bc0, 0x00300000, 0x00000000,
362 0x7030, 0xffffffff, 0x00000011,
363 0x7c30, 0xffffffff, 0x00000011,
364 0x10830, 0xffffffff, 0x00000011,
365 0x11430, 0xffffffff, 0x00000011,
366 0xd02c, 0xffffffff, 0x08421000,
367 0x240c, 0xffffffff, 0x00000380,
368 0x8b24, 0xffffffff, 0x00ff0fff,
369 0x28a4c, 0x06000000, 0x06000000,
370 0x10c, 0x00000001, 0x00000001,
371 0x8d00, 0xffffffff, 0x100e4848,
372 0x8d04, 0xffffffff, 0x00164745,
373 0x8c00, 0xffffffff, 0xe4000003,
374 0x8c04, 0xffffffff, 0x40600060,
375 0x8c08, 0xffffffff, 0x001c001c,
376 0x8cf0, 0xffffffff, 0x08e00410,
377 0x8c20, 0xffffffff, 0x00800080,
378 0x8c24, 0xffffffff, 0x00800080,
379 0x8c18, 0xffffffff, 0x20202078,
380 0x8c1c, 0xffffffff, 0x00001010,
381 0x28350, 0xffffffff, 0x00000000,
382 0xa008, 0xffffffff, 0x00010000,
383 0x5cc, 0xffffffff, 0x00000001,
384 0x9508, 0xffffffff, 0x00000002
387 static const u32 cedar_mgcg_init[] =
389 0x802c, 0xffffffff, 0xc0000000,
390 0x5448, 0xffffffff, 0x00000100,
391 0x55e4, 0xffffffff, 0x00000100,
392 0x160c, 0xffffffff, 0x00000100,
393 0x5644, 0xffffffff, 0x00000100,
394 0xc164, 0xffffffff, 0x00000100,
395 0x8a18, 0xffffffff, 0x00000100,
396 0x897c, 0xffffffff, 0x06000100,
397 0x8b28, 0xffffffff, 0x00000100,
398 0x9144, 0xffffffff, 0x00000100,
399 0x9a60, 0xffffffff, 0x00000100,
400 0x9868, 0xffffffff, 0x00000100,
401 0x8d58, 0xffffffff, 0x00000100,
402 0x9510, 0xffffffff, 0x00000100,
403 0x949c, 0xffffffff, 0x00000100,
404 0x9654, 0xffffffff, 0x00000100,
405 0x9030, 0xffffffff, 0x00000100,
406 0x9034, 0xffffffff, 0x00000100,
407 0x9038, 0xffffffff, 0x00000100,
408 0x903c, 0xffffffff, 0x00000100,
409 0x9040, 0xffffffff, 0x00000100,
410 0xa200, 0xffffffff, 0x00000100,
411 0xa204, 0xffffffff, 0x00000100,
412 0xa208, 0xffffffff, 0x00000100,
413 0xa20c, 0xffffffff, 0x00000100,
414 0x971c, 0xffffffff, 0x00000100,
415 0x977c, 0xffffffff, 0x00000100,
416 0x3f80, 0xffffffff, 0x00000100,
417 0xa210, 0xffffffff, 0x00000100,
418 0xa214, 0xffffffff, 0x00000100,
419 0x4d8, 0xffffffff, 0x00000100,
420 0x9784, 0xffffffff, 0x00000100,
421 0x9698, 0xffffffff, 0x00000100,
422 0x4d4, 0xffffffff, 0x00000200,
423 0x30cc, 0xffffffff, 0x00000100,
424 0xd0c0, 0xffffffff, 0xff000100,
425 0x802c, 0xffffffff, 0x40000000,
426 0x915c, 0xffffffff, 0x00010000,
427 0x9178, 0xffffffff, 0x00050000,
428 0x917c, 0xffffffff, 0x00030002,
429 0x918c, 0xffffffff, 0x00010004,
430 0x9190, 0xffffffff, 0x00070006,
431 0x9194, 0xffffffff, 0x00050000,
432 0x9198, 0xffffffff, 0x00030002,
433 0x91a8, 0xffffffff, 0x00010004,
434 0x91ac, 0xffffffff, 0x00070006,
435 0x91e8, 0xffffffff, 0x00000000,
436 0x9294, 0xffffffff, 0x00000000,
437 0x929c, 0xffffffff, 0x00000001,
438 0x802c, 0xffffffff, 0xc0000000
441 static const u32 juniper_mgcg_init[] =
443 0x802c, 0xffffffff, 0xc0000000,
444 0x5448, 0xffffffff, 0x00000100,
445 0x55e4, 0xffffffff, 0x00000100,
446 0x160c, 0xffffffff, 0x00000100,
447 0x5644, 0xffffffff, 0x00000100,
448 0xc164, 0xffffffff, 0x00000100,
449 0x8a18, 0xffffffff, 0x00000100,
450 0x897c, 0xffffffff, 0x06000100,
451 0x8b28, 0xffffffff, 0x00000100,
452 0x9144, 0xffffffff, 0x00000100,
453 0x9a60, 0xffffffff, 0x00000100,
454 0x9868, 0xffffffff, 0x00000100,
455 0x8d58, 0xffffffff, 0x00000100,
456 0x9510, 0xffffffff, 0x00000100,
457 0x949c, 0xffffffff, 0x00000100,
458 0x9654, 0xffffffff, 0x00000100,
459 0x9030, 0xffffffff, 0x00000100,
460 0x9034, 0xffffffff, 0x00000100,
461 0x9038, 0xffffffff, 0x00000100,
462 0x903c, 0xffffffff, 0x00000100,
463 0x9040, 0xffffffff, 0x00000100,
464 0xa200, 0xffffffff, 0x00000100,
465 0xa204, 0xffffffff, 0x00000100,
466 0xa208, 0xffffffff, 0x00000100,
467 0xa20c, 0xffffffff, 0x00000100,
468 0x971c, 0xffffffff, 0x00000100,
469 0xd0c0, 0xffffffff, 0xff000100,
470 0x802c, 0xffffffff, 0x40000000,
471 0x915c, 0xffffffff, 0x00010000,
472 0x9160, 0xffffffff, 0x00030002,
473 0x9178, 0xffffffff, 0x00070000,
474 0x917c, 0xffffffff, 0x00030002,
475 0x9180, 0xffffffff, 0x00050004,
476 0x918c, 0xffffffff, 0x00010006,
477 0x9190, 0xffffffff, 0x00090008,
478 0x9194, 0xffffffff, 0x00070000,
479 0x9198, 0xffffffff, 0x00030002,
480 0x919c, 0xffffffff, 0x00050004,
481 0x91a8, 0xffffffff, 0x00010006,
482 0x91ac, 0xffffffff, 0x00090008,
483 0x91b0, 0xffffffff, 0x00070000,
484 0x91b4, 0xffffffff, 0x00030002,
485 0x91b8, 0xffffffff, 0x00050004,
486 0x91c4, 0xffffffff, 0x00010006,
487 0x91c8, 0xffffffff, 0x00090008,
488 0x91cc, 0xffffffff, 0x00070000,
489 0x91d0, 0xffffffff, 0x00030002,
490 0x91d4, 0xffffffff, 0x00050004,
491 0x91e0, 0xffffffff, 0x00010006,
492 0x91e4, 0xffffffff, 0x00090008,
493 0x91e8, 0xffffffff, 0x00000000,
494 0x91ec, 0xffffffff, 0x00070000,
495 0x91f0, 0xffffffff, 0x00030002,
496 0x91f4, 0xffffffff, 0x00050004,
497 0x9200, 0xffffffff, 0x00010006,
498 0x9204, 0xffffffff, 0x00090008,
499 0x9208, 0xffffffff, 0x00070000,
500 0x920c, 0xffffffff, 0x00030002,
501 0x9210, 0xffffffff, 0x00050004,
502 0x921c, 0xffffffff, 0x00010006,
503 0x9220, 0xffffffff, 0x00090008,
504 0x9224, 0xffffffff, 0x00070000,
505 0x9228, 0xffffffff, 0x00030002,
506 0x922c, 0xffffffff, 0x00050004,
507 0x9238, 0xffffffff, 0x00010006,
508 0x923c, 0xffffffff, 0x00090008,
509 0x9240, 0xffffffff, 0x00070000,
510 0x9244, 0xffffffff, 0x00030002,
511 0x9248, 0xffffffff, 0x00050004,
512 0x9254, 0xffffffff, 0x00010006,
513 0x9258, 0xffffffff, 0x00090008,
514 0x925c, 0xffffffff, 0x00070000,
515 0x9260, 0xffffffff, 0x00030002,
516 0x9264, 0xffffffff, 0x00050004,
517 0x9270, 0xffffffff, 0x00010006,
518 0x9274, 0xffffffff, 0x00090008,
519 0x9278, 0xffffffff, 0x00070000,
520 0x927c, 0xffffffff, 0x00030002,
521 0x9280, 0xffffffff, 0x00050004,
522 0x928c, 0xffffffff, 0x00010006,
523 0x9290, 0xffffffff, 0x00090008,
524 0x9294, 0xffffffff, 0x00000000,
525 0x929c, 0xffffffff, 0x00000001,
526 0x802c, 0xffffffff, 0xc0000000,
527 0x977c, 0xffffffff, 0x00000100,
528 0x3f80, 0xffffffff, 0x00000100,
529 0xa210, 0xffffffff, 0x00000100,
530 0xa214, 0xffffffff, 0x00000100,
531 0x4d8, 0xffffffff, 0x00000100,
532 0x9784, 0xffffffff, 0x00000100,
533 0x9698, 0xffffffff, 0x00000100,
534 0x4d4, 0xffffffff, 0x00000200,
535 0x30cc, 0xffffffff, 0x00000100,
536 0x802c, 0xffffffff, 0xc0000000
539 static const u32 supersumo_golden_registers[] =
541 0x5eb4, 0xffffffff, 0x00000002,
542 0x5cc, 0xffffffff, 0x00000001,
543 0x7030, 0xffffffff, 0x00000011,
544 0x7c30, 0xffffffff, 0x00000011,
545 0x6104, 0x01000300, 0x00000000,
546 0x5bc0, 0x00300000, 0x00000000,
547 0x8c04, 0xffffffff, 0x40600060,
548 0x8c08, 0xffffffff, 0x001c001c,
549 0x8c20, 0xffffffff, 0x00800080,
550 0x8c24, 0xffffffff, 0x00800080,
551 0x8c18, 0xffffffff, 0x20202078,
552 0x8c1c, 0xffffffff, 0x00001010,
553 0x918c, 0xffffffff, 0x00010006,
554 0x91a8, 0xffffffff, 0x00010006,
555 0x91c4, 0xffffffff, 0x00010006,
556 0x91e0, 0xffffffff, 0x00010006,
557 0x9200, 0xffffffff, 0x00010006,
558 0x9150, 0xffffffff, 0x6e944040,
559 0x917c, 0xffffffff, 0x00030002,
560 0x9180, 0xffffffff, 0x00050004,
561 0x9198, 0xffffffff, 0x00030002,
562 0x919c, 0xffffffff, 0x00050004,
563 0x91b4, 0xffffffff, 0x00030002,
564 0x91b8, 0xffffffff, 0x00050004,
565 0x91d0, 0xffffffff, 0x00030002,
566 0x91d4, 0xffffffff, 0x00050004,
567 0x91f0, 0xffffffff, 0x00030002,
568 0x91f4, 0xffffffff, 0x00050004,
569 0x915c, 0xffffffff, 0x00010000,
570 0x9160, 0xffffffff, 0x00030002,
571 0x3f90, 0xffff0000, 0xff000000,
572 0x9178, 0xffffffff, 0x00070000,
573 0x9194, 0xffffffff, 0x00070000,
574 0x91b0, 0xffffffff, 0x00070000,
575 0x91cc, 0xffffffff, 0x00070000,
576 0x91ec, 0xffffffff, 0x00070000,
577 0x9148, 0xffff0000, 0xff000000,
578 0x9190, 0xffffffff, 0x00090008,
579 0x91ac, 0xffffffff, 0x00090008,
580 0x91c8, 0xffffffff, 0x00090008,
581 0x91e4, 0xffffffff, 0x00090008,
582 0x9204, 0xffffffff, 0x00090008,
583 0x3f94, 0xffff0000, 0xff000000,
584 0x914c, 0xffff0000, 0xff000000,
585 0x929c, 0xffffffff, 0x00000001,
586 0x8a18, 0xffffffff, 0x00000100,
587 0x8b28, 0xffffffff, 0x00000100,
588 0x9144, 0xffffffff, 0x00000100,
589 0x5644, 0xffffffff, 0x00000100,
590 0x9b7c, 0xffffffff, 0x00000000,
591 0x8030, 0xffffffff, 0x0000100a,
592 0x8a14, 0xffffffff, 0x00000007,
593 0x8b24, 0xffffffff, 0x00ff0fff,
594 0x8b10, 0xffffffff, 0x00000000,
595 0x28a4c, 0x06000000, 0x06000000,
596 0x4d8, 0xffffffff, 0x00000100,
597 0x913c, 0xffff000f, 0x0100000a,
598 0x960c, 0xffffffff, 0x54763210,
599 0x88c4, 0xffffffff, 0x000000c2,
600 0x88d4, 0xffffffff, 0x00000010,
601 0x8974, 0xffffffff, 0x00000000,
602 0xc78, 0x00000080, 0x00000080,
603 0x5e78, 0xffffffff, 0x001000f0,
604 0xd02c, 0xffffffff, 0x08421000,
605 0xa008, 0xffffffff, 0x00010000,
606 0x8d00, 0xffffffff, 0x100e4848,
607 0x8d04, 0xffffffff, 0x00164745,
608 0x8c00, 0xffffffff, 0xe4000003,
609 0x8cf0, 0x1fffffff, 0x08e00620,
610 0x28350, 0xffffffff, 0x00000000,
611 0x9508, 0xffffffff, 0x00000002
614 static const u32 sumo_golden_registers[] =
616 0x900c, 0x00ffffff, 0x0017071f,
617 0x8c18, 0xffffffff, 0x10101060,
618 0x8c1c, 0xffffffff, 0x00001010,
619 0x8c30, 0x0000000f, 0x00000005,
620 0x9688, 0x0000000f, 0x00000007
623 static const u32 wrestler_golden_registers[] =
625 0x5eb4, 0xffffffff, 0x00000002,
626 0x5cc, 0xffffffff, 0x00000001,
627 0x7030, 0xffffffff, 0x00000011,
628 0x7c30, 0xffffffff, 0x00000011,
629 0x6104, 0x01000300, 0x00000000,
630 0x5bc0, 0x00300000, 0x00000000,
631 0x918c, 0xffffffff, 0x00010006,
632 0x91a8, 0xffffffff, 0x00010006,
633 0x9150, 0xffffffff, 0x6e944040,
634 0x917c, 0xffffffff, 0x00030002,
635 0x9198, 0xffffffff, 0x00030002,
636 0x915c, 0xffffffff, 0x00010000,
637 0x3f90, 0xffff0000, 0xff000000,
638 0x9178, 0xffffffff, 0x00070000,
639 0x9194, 0xffffffff, 0x00070000,
640 0x9148, 0xffff0000, 0xff000000,
641 0x9190, 0xffffffff, 0x00090008,
642 0x91ac, 0xffffffff, 0x00090008,
643 0x3f94, 0xffff0000, 0xff000000,
644 0x914c, 0xffff0000, 0xff000000,
645 0x929c, 0xffffffff, 0x00000001,
646 0x8a18, 0xffffffff, 0x00000100,
647 0x8b28, 0xffffffff, 0x00000100,
648 0x9144, 0xffffffff, 0x00000100,
649 0x9b7c, 0xffffffff, 0x00000000,
650 0x8030, 0xffffffff, 0x0000100a,
651 0x8a14, 0xffffffff, 0x00000001,
652 0x8b24, 0xffffffff, 0x00ff0fff,
653 0x8b10, 0xffffffff, 0x00000000,
654 0x28a4c, 0x06000000, 0x06000000,
655 0x4d8, 0xffffffff, 0x00000100,
656 0x913c, 0xffff000f, 0x0100000a,
657 0x960c, 0xffffffff, 0x54763210,
658 0x88c4, 0xffffffff, 0x000000c2,
659 0x88d4, 0xffffffff, 0x00000010,
660 0x8974, 0xffffffff, 0x00000000,
661 0xc78, 0x00000080, 0x00000080,
662 0x5e78, 0xffffffff, 0x001000f0,
663 0xd02c, 0xffffffff, 0x08421000,
664 0xa008, 0xffffffff, 0x00010000,
665 0x8d00, 0xffffffff, 0x100e4848,
666 0x8d04, 0xffffffff, 0x00164745,
667 0x8c00, 0xffffffff, 0xe4000003,
668 0x8cf0, 0x1fffffff, 0x08e00410,
669 0x28350, 0xffffffff, 0x00000000,
670 0x9508, 0xffffffff, 0x00000002,
671 0x900c, 0xffffffff, 0x0017071f,
672 0x8c18, 0xffffffff, 0x10101060,
673 0x8c1c, 0xffffffff, 0x00001010
676 static const u32 barts_golden_registers[] =
678 0x5eb4, 0xffffffff, 0x00000002,
679 0x5e78, 0x8f311ff1, 0x001000f0,
680 0x3f90, 0xffff0000, 0xff000000,
681 0x9148, 0xffff0000, 0xff000000,
682 0x3f94, 0xffff0000, 0xff000000,
683 0x914c, 0xffff0000, 0xff000000,
684 0xc78, 0x00000080, 0x00000080,
685 0xbd4, 0x70073777, 0x00010001,
686 0xd02c, 0xbfffff1f, 0x08421000,
687 0xd0b8, 0x03773777, 0x02011003,
688 0x5bc0, 0x00200000, 0x50100000,
689 0x98f8, 0x33773777, 0x02011003,
690 0x98fc, 0xffffffff, 0x76543210,
691 0x7030, 0x31000311, 0x00000011,
692 0x2f48, 0x00000007, 0x02011003,
693 0x6b28, 0x00000010, 0x00000012,
694 0x7728, 0x00000010, 0x00000012,
695 0x10328, 0x00000010, 0x00000012,
696 0x10f28, 0x00000010, 0x00000012,
697 0x11b28, 0x00000010, 0x00000012,
698 0x12728, 0x00000010, 0x00000012,
699 0x240c, 0x000007ff, 0x00000380,
700 0x8a14, 0xf000001f, 0x00000007,
701 0x8b24, 0x3fff3fff, 0x00ff0fff,
702 0x8b10, 0x0000ff0f, 0x00000000,
703 0x28a4c, 0x07ffffff, 0x06000000,
704 0x10c, 0x00000001, 0x00010003,
705 0xa02c, 0xffffffff, 0x0000009b,
706 0x913c, 0x0000000f, 0x0100000a,
707 0x8d00, 0xffff7f7f, 0x100e4848,
708 0x8d04, 0x00ffffff, 0x00164745,
709 0x8c00, 0xfffc0003, 0xe4000003,
710 0x8c04, 0xf8ff00ff, 0x40600060,
711 0x8c08, 0x00ff00ff, 0x001c001c,
712 0x8cf0, 0x1fff1fff, 0x08e00620,
713 0x8c20, 0x0fff0fff, 0x00800080,
714 0x8c24, 0x0fff0fff, 0x00800080,
715 0x8c18, 0xffffffff, 0x20202078,
716 0x8c1c, 0x0000ffff, 0x00001010,
717 0x28350, 0x00000f01, 0x00000000,
718 0x9508, 0x3700001f, 0x00000002,
719 0x960c, 0xffffffff, 0x54763210,
720 0x88c4, 0x001f3ae3, 0x000000c2,
721 0x88d4, 0x0000001f, 0x00000010,
722 0x8974, 0xffffffff, 0x00000000
725 static const u32 turks_golden_registers[] =
727 0x5eb4, 0xffffffff, 0x00000002,
728 0x5e78, 0x8f311ff1, 0x001000f0,
729 0x8c8, 0x00003000, 0x00001070,
730 0x8cc, 0x000fffff, 0x00040035,
731 0x3f90, 0xffff0000, 0xfff00000,
732 0x9148, 0xffff0000, 0xfff00000,
733 0x3f94, 0xffff0000, 0xfff00000,
734 0x914c, 0xffff0000, 0xfff00000,
735 0xc78, 0x00000080, 0x00000080,
736 0xbd4, 0x00073007, 0x00010002,
737 0xd02c, 0xbfffff1f, 0x08421000,
738 0xd0b8, 0x03773777, 0x02010002,
739 0x5bc0, 0x00200000, 0x50100000,
740 0x98f8, 0x33773777, 0x00010002,
741 0x98fc, 0xffffffff, 0x33221100,
742 0x7030, 0x31000311, 0x00000011,
743 0x2f48, 0x33773777, 0x00010002,
744 0x6b28, 0x00000010, 0x00000012,
745 0x7728, 0x00000010, 0x00000012,
746 0x10328, 0x00000010, 0x00000012,
747 0x10f28, 0x00000010, 0x00000012,
748 0x11b28, 0x00000010, 0x00000012,
749 0x12728, 0x00000010, 0x00000012,
750 0x240c, 0x000007ff, 0x00000380,
751 0x8a14, 0xf000001f, 0x00000007,
752 0x8b24, 0x3fff3fff, 0x00ff0fff,
753 0x8b10, 0x0000ff0f, 0x00000000,
754 0x28a4c, 0x07ffffff, 0x06000000,
755 0x10c, 0x00000001, 0x00010003,
756 0xa02c, 0xffffffff, 0x0000009b,
757 0x913c, 0x0000000f, 0x0100000a,
758 0x8d00, 0xffff7f7f, 0x100e4848,
759 0x8d04, 0x00ffffff, 0x00164745,
760 0x8c00, 0xfffc0003, 0xe4000003,
761 0x8c04, 0xf8ff00ff, 0x40600060,
762 0x8c08, 0x00ff00ff, 0x001c001c,
763 0x8cf0, 0x1fff1fff, 0x08e00410,
764 0x8c20, 0x0fff0fff, 0x00800080,
765 0x8c24, 0x0fff0fff, 0x00800080,
766 0x8c18, 0xffffffff, 0x20202078,
767 0x8c1c, 0x0000ffff, 0x00001010,
768 0x28350, 0x00000f01, 0x00000000,
769 0x9508, 0x3700001f, 0x00000002,
770 0x960c, 0xffffffff, 0x54763210,
771 0x88c4, 0x001f3ae3, 0x000000c2,
772 0x88d4, 0x0000001f, 0x00000010,
773 0x8974, 0xffffffff, 0x00000000
776 static const u32 caicos_golden_registers[] =
778 0x5eb4, 0xffffffff, 0x00000002,
779 0x5e78, 0x8f311ff1, 0x001000f0,
780 0x8c8, 0x00003420, 0x00001450,
781 0x8cc, 0x000fffff, 0x00040035,
782 0x3f90, 0xffff0000, 0xfffc0000,
783 0x9148, 0xffff0000, 0xfffc0000,
784 0x3f94, 0xffff0000, 0xfffc0000,
785 0x914c, 0xffff0000, 0xfffc0000,
786 0xc78, 0x00000080, 0x00000080,
787 0xbd4, 0x00073007, 0x00010001,
788 0xd02c, 0xbfffff1f, 0x08421000,
789 0xd0b8, 0x03773777, 0x02010001,
790 0x5bc0, 0x00200000, 0x50100000,
791 0x98f8, 0x33773777, 0x02010001,
792 0x98fc, 0xffffffff, 0x33221100,
793 0x7030, 0x31000311, 0x00000011,
794 0x2f48, 0x33773777, 0x02010001,
795 0x6b28, 0x00000010, 0x00000012,
796 0x7728, 0x00000010, 0x00000012,
797 0x10328, 0x00000010, 0x00000012,
798 0x10f28, 0x00000010, 0x00000012,
799 0x11b28, 0x00000010, 0x00000012,
800 0x12728, 0x00000010, 0x00000012,
801 0x240c, 0x000007ff, 0x00000380,
802 0x8a14, 0xf000001f, 0x00000001,
803 0x8b24, 0x3fff3fff, 0x00ff0fff,
804 0x8b10, 0x0000ff0f, 0x00000000,
805 0x28a4c, 0x07ffffff, 0x06000000,
806 0x10c, 0x00000001, 0x00010003,
807 0xa02c, 0xffffffff, 0x0000009b,
808 0x913c, 0x0000000f, 0x0100000a,
809 0x8d00, 0xffff7f7f, 0x100e4848,
810 0x8d04, 0x00ffffff, 0x00164745,
811 0x8c00, 0xfffc0003, 0xe4000003,
812 0x8c04, 0xf8ff00ff, 0x40600060,
813 0x8c08, 0x00ff00ff, 0x001c001c,
814 0x8cf0, 0x1fff1fff, 0x08e00410,
815 0x8c20, 0x0fff0fff, 0x00800080,
816 0x8c24, 0x0fff0fff, 0x00800080,
817 0x8c18, 0xffffffff, 0x20202078,
818 0x8c1c, 0x0000ffff, 0x00001010,
819 0x28350, 0x00000f01, 0x00000000,
820 0x9508, 0x3700001f, 0x00000002,
821 0x960c, 0xffffffff, 0x54763210,
822 0x88c4, 0x001f3ae3, 0x000000c2,
823 0x88d4, 0x0000001f, 0x00000010,
824 0x8974, 0xffffffff, 0x00000000
827 static void evergreen_init_golden_registers(struct radeon_device *rdev)
829 switch (rdev->family) {
832 radeon_program_register_sequence(rdev,
833 evergreen_golden_registers,
834 (const u32)ARRAY_SIZE(evergreen_golden_registers));
835 radeon_program_register_sequence(rdev,
836 evergreen_golden_registers2,
837 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
838 radeon_program_register_sequence(rdev,
840 (const u32)ARRAY_SIZE(cypress_mgcg_init));
843 radeon_program_register_sequence(rdev,
844 evergreen_golden_registers,
845 (const u32)ARRAY_SIZE(evergreen_golden_registers));
846 radeon_program_register_sequence(rdev,
847 evergreen_golden_registers2,
848 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
849 radeon_program_register_sequence(rdev,
851 (const u32)ARRAY_SIZE(juniper_mgcg_init));
854 radeon_program_register_sequence(rdev,
855 evergreen_golden_registers,
856 (const u32)ARRAY_SIZE(evergreen_golden_registers));
857 radeon_program_register_sequence(rdev,
858 evergreen_golden_registers2,
859 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
860 radeon_program_register_sequence(rdev,
862 (const u32)ARRAY_SIZE(redwood_mgcg_init));
865 radeon_program_register_sequence(rdev,
866 cedar_golden_registers,
867 (const u32)ARRAY_SIZE(cedar_golden_registers));
868 radeon_program_register_sequence(rdev,
869 evergreen_golden_registers2,
870 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
871 radeon_program_register_sequence(rdev,
873 (const u32)ARRAY_SIZE(cedar_mgcg_init));
876 radeon_program_register_sequence(rdev,
877 wrestler_golden_registers,
878 (const u32)ARRAY_SIZE(wrestler_golden_registers));
881 radeon_program_register_sequence(rdev,
882 supersumo_golden_registers,
883 (const u32)ARRAY_SIZE(supersumo_golden_registers));
886 radeon_program_register_sequence(rdev,
887 supersumo_golden_registers,
888 (const u32)ARRAY_SIZE(supersumo_golden_registers));
889 radeon_program_register_sequence(rdev,
890 sumo_golden_registers,
891 (const u32)ARRAY_SIZE(sumo_golden_registers));
894 radeon_program_register_sequence(rdev,
895 barts_golden_registers,
896 (const u32)ARRAY_SIZE(barts_golden_registers));
899 radeon_program_register_sequence(rdev,
900 turks_golden_registers,
901 (const u32)ARRAY_SIZE(turks_golden_registers));
904 radeon_program_register_sequence(rdev,
905 caicos_golden_registers,
906 (const u32)ARRAY_SIZE(caicos_golden_registers));
/* Decode the EG tiling fields packed in tiling_flags (bank width/height,
 * macro tile aspect, tile split) into the EVERGREEN_ADDR_SURF_* register
 * encodings expected by the surface registers.
 */
913 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
914 unsigned *bankh, unsigned *mtaspect,
915 unsigned *tile_split)
917 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
918 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
919 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
920 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
/* map raw bank width (1/2/4/8) to its register encoding */
923 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
924 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
925 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
926 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
/* map raw bank height (1/2/4/8) to its register encoding */
930 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
931 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
932 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
933 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
/* map raw macro tile aspect (1/2/4/8) to its register encoding */
937 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
938 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
939 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
940 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
944 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
945 u32 cntl_reg, u32 status_reg)
948 struct atom_clock_dividers dividers;
950 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
951 clock, false, ÷rs);
955 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
957 for (i = 0; i < 100; i++) {
958 if (RREG32(status_reg) & DCLK_STATUS)
/* Program both UVD clocks and cache them (in MHz) in CG_SCRATCH1:
 * low 16 bits = vclk / 100, high 16 bits = dclk / 100.
 */
968 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
971 u32 cg_scratch = RREG32(CG_SCRATCH1);
973 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
/* record vclk in the low half of the scratch register */
976 cg_scratch &= 0xffff0000;
977 cg_scratch |= vclk / 100; /* Mhz */
979 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
/* record dclk in the high half of the scratch register */
982 cg_scratch &= 0x0000ffff;
983 cg_scratch |= (dclk / 100) << 16; /* Mhz */
986 WREG32(CG_SCRATCH1, cg_scratch);
/* Reprogram the UPLL to generate the requested UVD vclk/dclk.
 * If either requested clock is 0 the PLL is left in bypass and put
 * to sleep.  The programming sequence (bypass, dividers, reset
 * toggles, ctlreq handshakes, source switch) is order-sensitive.
 */
991 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
993 /* start off with something large */
994 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
997 /* bypass vclk and dclk with bclk */
998 WREG32_P(CG_UPLL_FUNC_CNTL_2,
999 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1000 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK))
1002 /* put PLL in bypass mode */
1003 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
1005 if (!vclk || !dclk) {
1006 /* keep the Bypass mode, put PLL to sleep */
1007 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
/* compute feedback and post dividers for the requested clocks */
1011 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1012 16384, 0x03FFFFFF, 0, 128, 5,
1013 &fb_div, &vclk_div, &dclk_div);
1017 /* set VCO_MODE to 1 */
1018 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1020 /* toggle UPLL_SLEEP to 1 then back to 0 */
1021 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1022 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1024 /* deassert UPLL_RESET */
1025 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1029 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1033 /* assert UPLL_RESET again */
1034 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1036 /* disable spread spectrum. */
1037 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1039 /* set feedback divider */
1040 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1042 /* set ref divider to 0 */
1043 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* ISPARE9 selection depends on the feedback divider magnitude */
1045 if (fb_div < 307200)
1046 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1048 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1050 /* set PDIV_A and PDIV_B */
1051 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1052 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1053 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1055 /* give the PLL some time to settle */
1058 /* deassert PLL_RESET */
1059 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1063 /* switch from bypass mode to normal mode */
1064 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1066 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1070 /* switch VCLK and DCLK selection */
1071 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1072 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1073 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/* Sanitize the PCIe Max_Read_Request_Size field (DEVCTRL bits 14:12)
 * if the BIOS/OS left it at an invalid/unsupported encoding.
 */
1080 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1085 err = pci_find_extcap(rdev->dev, PCIY_EXPRESS, &cap);
/* cap now points at the PCIe capability; index the Device Control reg */
1089 cap += PCIER_DEVCTRL;
1091 ctl = pci_read_config(rdev->dev, cap, 2);
/* extract the 3-bit MAX_READ_REQUEST_SIZE encoding */
1093 v = (ctl & PCIEM_DEVCTL_MAX_READRQ_MASK) >> 12;
1095 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1096 * to avoid hangs or performance issues
1098 if ((v == 0) || (v == 6) || (v == 7)) {
1099 ctl &= ~PCIEM_DEVCTL_MAX_READRQ_MASK;
1101 pci_write_config(rdev->dev, cap, ctl, 2);
/* Return true while the CRTC status register reports V_BLANK (evergreen+). */
1105 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1107 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
/* Sample the CRTC position register twice; if the two reads differ the
 * scanout counter is advancing (i.e. the CRTC is actually running).
 */
1113 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1117 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1118 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1127 * dce4_wait_for_vblank - vblank wait asic callback.
1129 * @rdev: radeon_device pointer
1130 * @crtc: crtc to wait for vblank on
1132 * Wait for vblank on the requested crtc (evergreen+).
1134 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
/* nothing to wait on for an out-of-range or disabled crtc */
1138 if (crtc >= rdev->num_crtc)
1141 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1144 /* depending on when we hit vblank, we may be close to active; if so,
1145 * wait for another frame.
1147 while (dce4_is_in_vblank(rdev, crtc)) {
/* bail out periodically if the counter is stuck (crtc hung/off) */
1148 if (i++ % 100 == 0) {
1149 if (!dce4_is_counter_moving(rdev, crtc))
/* now wait for the next vblank to actually begin */
1154 while (!dce4_is_in_vblank(rdev, crtc)) {
1155 if (i++ % 100 == 0) {
1156 if (!dce4_is_counter_moving(rdev, crtc))
1163 * evergreen_pre_page_flip - pre-pageflip callback.
1165 * @rdev: radeon_device pointer
1166 * @crtc: crtc to prepare for pageflip on
1168 * Pre-pageflip callback (evergreen+).
1169 * Enables the pageflip irq (vblank irq).
1171 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1173 /* enable the pflip int */
1174 radeon_irq_kms_pflip_irq_get(rdev, crtc);
1178 * evergreen_post_page_flip - post-pageflip callback.
1180 * @rdev: radeon_device pointer
1181 * @crtc: crtc to cleanup pageflip on
1183 * Post-pageflip callback (evergreen+).
1184 * Disables the pageflip irq (vblank irq).
1186 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1188 /* disable the pflip int */
1189 radeon_irq_kms_pflip_irq_put(rdev, crtc);
1193 * evergreen_page_flip - pageflip callback.
1195 * @rdev: radeon_device pointer
1196 * @crtc_id: crtc to cleanup pageflip on
1197 * @crtc_base: new address of the crtc (GPU MC address)
1199 * Does the actual pageflip (evergreen+).
1200 * During vblank we take the crtc lock and wait for the update_pending
1201 * bit to go high, when it does, we release the lock, and allow the
1202 * double buffered update to take place.
1203 * Returns the current update pending status.
1205 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1207 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1208 u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1211 /* Lock the graphics update lock */
1212 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1213 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1215 /* update the scanout addresses */
/* both primary and secondary surfaces point at the new base */
1216 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1217 upper_32_bits(crtc_base));
1218 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1221 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1222 upper_32_bits(crtc_base));
1223 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1226 /* Wait for update_pending to go high. */
1227 for (i = 0; i < rdev->usec_timeout; i++) {
1228 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1232 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1234 /* Unlock the lock, so double-buffering can take place inside vblank */
1235 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1236 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1238 /* Return current update_pending status: */
1239 return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1242 /* get temperature in millidegrees */
1243 int evergreen_get_temp(struct radeon_device *rdev)
1246 int actual_temp = 0;
/* Juniper uses TS0 plus a signed offset from CG_THERMAL_CTRL */
1248 if (rdev->family == CHIP_JUNIPER) {
1249 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1251 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
/* bit 8 of toffset is the sign of the calibration offset */
1254 if (toffset & 0x100)
1255 actual_temp = temp / 2 - (0x200 - toffset);
1257 actual_temp = temp / 2 + toffset;
1259 actual_temp = actual_temp * 1000;
/* other evergreen parts read ASIC_T from CG_MULT_THERMAL_STATUS */
1262 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1267 else if (temp & 0x200)
/* bit 8 set: 9-bit negative value, sign-extend */
1269 else if (temp & 0x100) {
1270 actual_temp = temp & 0x1ff;
1271 actual_temp |= ~0x1ff;
1273 actual_temp = temp & 0xff;
1275 actual_temp = (actual_temp * 1000) / 2;
/* Get sumo temperature in millidegrees: raw reading minus a fixed
 * 49 degree calibration offset, scaled by 1000.
 */
1281 int sumo_get_temp(struct radeon_device *rdev)
1283 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1284 int actual_temp = temp - 49;
1286 return actual_temp * 1000;
1290 * sumo_pm_init_profile - Initialize power profiles callback.
1292 * @rdev: radeon_device pointer
1294 * Initialize the power states used in profile mode
1295 * (sumo, trinity, SI).
1296 * Used for profile mode only.
1298 void sumo_pm_init_profile(struct radeon_device *rdev)
/* default profile: use the board's default power state for both dpms states */
1303 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1304 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1305 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1306 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
/* low/mid profiles: battery state on mobility parts, else performance */
1309 if (rdev->flags & RADEON_IS_MOBILITY)
1310 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1312 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1314 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1315 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1316 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1317 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1319 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1320 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1321 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1322 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1324 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1325 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1326 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1327 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1329 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1330 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1331 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1332 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
/* high profiles: always performance; on-clock-mode is the highest mode */
1335 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1336 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1337 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1338 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1339 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1340 rdev->pm.power_state[idx].num_clock_modes - 1;
1342 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1343 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1344 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1345 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1346 rdev->pm.power_state[idx].num_clock_modes - 1;
1350 * btc_pm_init_profile - Initialize power profiles callback.
1352 * @rdev: radeon_device pointer
1354 * Initialize the power states used in profile mode
1356 * Used for profile mode only.
1358 void btc_pm_init_profile(struct radeon_device *rdev)
/* default profile: board default power state, clock mode 2 when on */
1363 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1364 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1365 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1366 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1367 /* starting with BTC, there is one state that is used for both
1368 * MH and SH. Difference is that we always use the high clock index for
1371 if (rdev->flags & RADEON_IS_MOBILITY)
1372 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1374 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
/* SH profiles: same state, clock mode index 0/1/2 for low/mid/high */
1376 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1377 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1378 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1379 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1381 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1382 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1383 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1384 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1386 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1387 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1388 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1389 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
/* MH profiles mirror the SH ones */
1391 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1392 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1393 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1394 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1396 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1397 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1398 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1399 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1401 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1402 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1403 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1404 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1408 * evergreen_pm_misc - set additional pm hw parameters callback.
1410 * @rdev: radeon_device pointer
1412 * Set non-clock parameters associated with a power state
1413 * (voltage, etc.) (evergreen+).
1415 void evergreen_pm_misc(struct radeon_device *rdev)
1417 int req_ps_idx = rdev->pm.requested_power_state_index;
1418 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1419 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1420 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1422 if (voltage->type == VOLTAGE_SW) {
1423 /* 0xff01 is a flag rather then an actual voltage */
1424 if (voltage->voltage == 0xff01)
/* only touch the hw when the requested vddc actually differs */
1426 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1427 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1428 rdev->pm.current_vddc = voltage->voltage;
1429 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1432 /* starting with BTC, there is one state that is used for both
1433 * MH and SH. Difference is that we always use the high clock index for
1436 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1437 (rdev->family >= CHIP_BARTS) &&
1438 rdev->pm.active_crtc_count &&
1439 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1440 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
/* use the voltage of the HIGH_MH on-clock-mode instead */
1441 voltage = &rdev->pm.power_state[req_ps_idx].
1442 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1444 /* 0xff01 is a flag rather then an actual voltage */
1445 if (voltage->vddci == 0xff01)
1447 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1448 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1449 rdev->pm.current_vddci = voltage->vddci;
1450 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1456 * evergreen_pm_prepare - pre-power state change callback.
1458 * @rdev: radeon_device pointer
1460 * Prepare for a power state change (evergreen+).
1462 void evergreen_pm_prepare(struct radeon_device *rdev)
1464 struct drm_device *ddev = rdev->ddev;
1465 struct drm_crtc *crtc;
1466 struct radeon_crtc *radeon_crtc;
1469 /* disable any active CRTCs */
/* set DISP_READ_REQUEST_DISABLE on every enabled crtc so display
 * reads are quiesced while clocks/voltages change */
1470 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1471 radeon_crtc = to_radeon_crtc(crtc);
1472 if (radeon_crtc->enabled) {
1473 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1474 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1475 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1481 * evergreen_pm_finish - post-power state change callback.
1483 * @rdev: radeon_device pointer
1485 * Clean up after a power state change (evergreen+).
1487 void evergreen_pm_finish(struct radeon_device *rdev)
1489 struct drm_device *ddev = rdev->ddev;
1490 struct drm_crtc *crtc;
1491 struct radeon_crtc *radeon_crtc;
1494 /* enable any active CRTCs */
/* clear DISP_READ_REQUEST_DISABLE again, undoing evergreen_pm_prepare() */
1495 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1496 radeon_crtc = to_radeon_crtc(crtc);
1497 if (radeon_crtc->enabled) {
1498 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1499 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1500 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1506 * evergreen_hpd_sense - hpd sense callback.
1508 * @rdev: radeon_device pointer
1509 * @hpd: hpd (hotplug detect) pin
1511 * Checks if a digital monitor is connected (evergreen+).
1512 * Returns true if connected, false if not connected.
1514 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1516 bool connected = false;
/* each HPD pin has its own status register; test the SENSE bit */
1520 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1524 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1528 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1532 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1536 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1540 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1551 * evergreen_hpd_set_polarity - hpd set polarity callback.
1553 * @rdev: radeon_device pointer
1554 * @hpd: hpd (hotplug detect) pin
1556 * Set the polarity of the hpd pin (evergreen+).
1558 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1559 enum radeon_hpd_id hpd)
1562 bool connected = evergreen_hpd_sense(rdev, hpd);
/* per-pin: interrupt polarity is cleared when connected so the next
 * interrupt fires on disconnect, and vice versa */
1566 tmp = RREG32(DC_HPD1_INT_CONTROL);
1568 tmp &= ~DC_HPDx_INT_POLARITY;
1570 tmp |= DC_HPDx_INT_POLARITY;
1571 WREG32(DC_HPD1_INT_CONTROL, tmp);
1574 tmp = RREG32(DC_HPD2_INT_CONTROL);
1576 tmp &= ~DC_HPDx_INT_POLARITY;
1578 tmp |= DC_HPDx_INT_POLARITY;
1579 WREG32(DC_HPD2_INT_CONTROL, tmp);
1582 tmp = RREG32(DC_HPD3_INT_CONTROL);
1584 tmp &= ~DC_HPDx_INT_POLARITY;
1586 tmp |= DC_HPDx_INT_POLARITY;
1587 WREG32(DC_HPD3_INT_CONTROL, tmp);
1590 tmp = RREG32(DC_HPD4_INT_CONTROL);
1592 tmp &= ~DC_HPDx_INT_POLARITY;
1594 tmp |= DC_HPDx_INT_POLARITY;
1595 WREG32(DC_HPD4_INT_CONTROL, tmp);
1598 tmp = RREG32(DC_HPD5_INT_CONTROL);
1600 tmp &= ~DC_HPDx_INT_POLARITY;
1602 tmp |= DC_HPDx_INT_POLARITY;
1603 WREG32(DC_HPD5_INT_CONTROL, tmp);
1606 tmp = RREG32(DC_HPD6_INT_CONTROL);
1608 tmp &= ~DC_HPDx_INT_POLARITY;
1610 tmp |= DC_HPDx_INT_POLARITY;
1611 WREG32(DC_HPD6_INT_CONTROL, tmp);
1619 * evergreen_hpd_init - hpd setup callback.
1621 * @rdev: radeon_device pointer
1623 * Setup the hpd pins used by the card (evergreen+).
1624 * Enable the pin, set the polarity, and enable the hpd interrupts.
1626 void evergreen_hpd_init(struct radeon_device *rdev)
1628 struct drm_device *dev = rdev->ddev;
1629 struct drm_connector *connector;
1630 unsigned enabled = 0;
/* common control word: enable pin with connection/RX interrupt timers */
1631 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1632 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1634 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1635 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1637 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1638 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1639 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1640 * aux dp channel on imac and help (but not completely fix)
1641 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1642 * also avoid interrupt storms during dpms.
1646 switch (radeon_connector->hpd.hpd) {
1648 WREG32(DC_HPD1_CONTROL, tmp);
1651 WREG32(DC_HPD2_CONTROL, tmp);
1654 WREG32(DC_HPD3_CONTROL, tmp);
1657 WREG32(DC_HPD4_CONTROL, tmp);
1660 WREG32(DC_HPD5_CONTROL, tmp);
1663 WREG32(DC_HPD6_CONTROL, tmp);
/* record pin in the enable mask handed to the irq layer */
1668 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1669 enabled |= 1 << radeon_connector->hpd.hpd;
1671 radeon_irq_kms_enable_hpd(rdev, enabled);
1675 * evergreen_hpd_fini - hpd tear down callback.
1677 * @rdev: radeon_device pointer
1679 * Tear down the hpd pins used by the card (evergreen+).
1680 * Disable the hpd interrupts.
1682 void evergreen_hpd_fini(struct radeon_device *rdev)
1684 struct drm_device *dev = rdev->ddev;
1685 struct drm_connector *connector;
1686 unsigned disabled = 0;
1688 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1689 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
/* write 0 to each pin's control register to disable it */
1690 switch (radeon_connector->hpd.hpd) {
1692 WREG32(DC_HPD1_CONTROL, 0);
1695 WREG32(DC_HPD2_CONTROL, 0);
1698 WREG32(DC_HPD3_CONTROL, 0);
1701 WREG32(DC_HPD4_CONTROL, 0);
1704 WREG32(DC_HPD5_CONTROL, 0);
1707 WREG32(DC_HPD6_CONTROL, 0);
1712 disabled |= 1 << radeon_connector->hpd.hpd;
1714 radeon_irq_kms_disable_hpd(rdev, disabled);
1717 /* watermark setup */
/* Pick the DC_LB_MEMORY_SPLIT allocation for this crtc based on whether
 * it and its paired crtc are enabled, then return the line buffer size
 * granted to this crtc (DCE5 and earlier use different sizes).
 */
1719 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1720 struct radeon_crtc *radeon_crtc,
1721 struct drm_display_mode *mode,
1722 struct drm_display_mode *other_mode)
1727 * There are 3 line buffers, each one shared by 2 display controllers.
1728 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1729 * the display controllers. The paritioning is done via one of four
1730 * preset allocations specified in bits 2:0:
1731 * first display controller
1732 * 0 - first half of lb (3840 * 2)
1733 * 1 - first 3/4 of lb (5760 * 2)
1734 * 2 - whole lb (7680 * 2), other crtc must be disabled
1735 * 3 - first 1/4 of lb (1920 * 2)
1736 * second display controller
1737 * 4 - second half of lb (3840 * 2)
1738 * 5 - second 3/4 of lb (5760 * 2)
1739 * 6 - whole lb (7680 * 2), other crtc must be disabled
1740 * 7 - last 1/4 of lb (1920 * 2)
1742 /* this can get tricky if we have two large displays on a paired group
1743 * of crtcs. Ideally for multiple large displays we'd assign them to
1744 * non-linked crtcs for maximum line buffer allocation.
1746 if (radeon_crtc->base.enabled && mode) {
1750 tmp = 2; /* whole */
1754 /* second controller of the pair uses second half of the lb */
1755 if (radeon_crtc->crtc_id % 2)
1757 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
/* translate the chosen split into the lb size for this crtc */
1759 if (radeon_crtc->base.enabled && mode) {
1764 if (ASIC_IS_DCE5(rdev))
1770 if (ASIC_IS_DCE5(rdev))
1776 if (ASIC_IS_DCE5(rdev))
1782 if (ASIC_IS_DCE5(rdev))
1789 /* controller not enabled, so no lb used */
/* Decode the number of DRAM channels from MC_SHARED_CHMAP's NOOFCHAN field. */
1793 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1795 u32 tmp = RREG32(MC_SHARED_CHMAP);
1797 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* Inputs for the display watermark calculations below; filled in by
 * evergreen_program_watermarks() from the current mode and clocks.
 */
1810 struct evergreen_wm_params {
1811 u32 dram_channels; /* number of dram channels */
1812 u32 yclk; /* bandwidth per dram data pin in kHz */
1813 u32 sclk; /* engine clock in kHz */
1814 u32 disp_clk; /* display clock in kHz */
1815 u32 src_width; /* viewport width */
1816 u32 active_time; /* active display time in ns */
1817 u32 blank_time; /* blank time in ns */
1818 bool interlaced; /* mode is interlaced */
1819 fixed20_12 vsc; /* vertical scale ratio */
1820 u32 num_heads; /* number of active crtcs */
1821 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1822 u32 lb_size; /* line buffer allocated to pipe */
1823 u32 vtaps; /* vertical scaler taps */
/* Raw DRAM bandwidth: yclk(MHz) * 4 bytes/pin * channels * 0.7 efficiency,
 * computed in 20.12 fixed point and truncated to an integer.
 */
1826 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1828 /* Calculate DRAM Bandwidth and the part allocated to display. */
1829 fixed20_12 dram_efficiency; /* 0.7 */
1830 fixed20_12 yclk, dram_channels, bandwidth;
1833 a.full = dfixed_const(1000);
1834 yclk.full = dfixed_const(wm->yclk);
1835 yclk.full = dfixed_div(yclk, a);
1836 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1837 a.full = dfixed_const(10);
1838 dram_efficiency.full = dfixed_const(7);
1839 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1840 bandwidth.full = dfixed_mul(dram_channels, yclk);
1841 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1843 return dfixed_trunc(bandwidth);
/* DRAM bandwidth available to the display controller: same as
 * evergreen_dram_bandwidth() but with a worst-case 0.3 display allocation.
 */
1846 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1848 /* Calculate DRAM Bandwidth and the part allocated to display. */
1849 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1850 fixed20_12 yclk, dram_channels, bandwidth;
1853 a.full = dfixed_const(1000);
1854 yclk.full = dfixed_const(wm->yclk);
1855 yclk.full = dfixed_div(yclk, a);
1856 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1857 a.full = dfixed_const(10);
1858 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1859 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1860 bandwidth.full = dfixed_mul(dram_channels, yclk);
1861 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1863 return dfixed_trunc(bandwidth);
/* Display data return bandwidth: 32 bytes * sclk(MHz) * 0.8 efficiency. */
1866 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1868 /* Calculate the display Data return Bandwidth */
1869 fixed20_12 return_efficiency; /* 0.8 */
1870 fixed20_12 sclk, bandwidth;
1873 a.full = dfixed_const(1000);
1874 sclk.full = dfixed_const(wm->sclk);
1875 sclk.full = dfixed_div(sclk, a);
1876 a.full = dfixed_const(10);
1877 return_efficiency.full = dfixed_const(8);
1878 return_efficiency.full = dfixed_div(return_efficiency, a);
1879 a.full = dfixed_const(32);
1880 bandwidth.full = dfixed_mul(a, sclk);
1881 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1883 return dfixed_trunc(bandwidth);
/* DMIF request bandwidth: 32 bytes * disp_clk(MHz) * 0.8 efficiency. */
1886 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1888 /* Calculate the DMIF Request Bandwidth */
1889 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1890 fixed20_12 disp_clk, bandwidth;
1893 a.full = dfixed_const(1000);
1894 disp_clk.full = dfixed_const(wm->disp_clk);
1895 disp_clk.full = dfixed_div(disp_clk, a);
1896 a.full = dfixed_const(10);
1897 disp_clk_request_efficiency.full = dfixed_const(8);
1898 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1899 a.full = dfixed_const(32);
1900 bandwidth.full = dfixed_mul(a, disp_clk);
1901 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1903 return dfixed_trunc(bandwidth);
/* Available bandwidth is the bottleneck of the three paths:
 * DRAM, data return, and DMIF request bandwidth.
 */
1906 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1908 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1909 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1910 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1911 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1913 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/* Average bandwidth this mode needs: src_width * bytes/pixel * vsc
 * divided by the line time (active + blank, in us).
 */
1916 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1918 /* Calculate the display mode Average Bandwidth
1919 * DisplayMode should contain the source and destination dimensions,
1923 fixed20_12 line_time;
1924 fixed20_12 src_width;
1925 fixed20_12 bandwidth;
1928 a.full = dfixed_const(1000);
1929 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1930 line_time.full = dfixed_div(line_time, a);
1931 bpp.full = dfixed_const(wm->bytes_per_pixel);
1932 src_width.full = dfixed_const(wm->src_width);
1933 bandwidth.full = dfixed_mul(src_width, bpp);
1934 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1935 bandwidth.full = dfixed_div(bandwidth, line_time);
1937 return dfixed_trunc(bandwidth);
/* Compute the latency watermark in ns: memory latency plus time for the
 * other heads' data to return, extended by line-fill time when the line
 * buffer cannot be filled within the active period.
 */
1940 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
1942 /* First calcualte the latency in ns */
1943 u32 mc_latency = 2000; /* 2000 ns. */
1944 u32 available_bandwidth = evergreen_available_bandwidth(wm);
1945 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
1946 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
1947 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
1948 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
1949 (wm->num_heads * cursor_line_pair_return_time);
1950 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
1951 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
1954 if (wm->num_heads == 0)
/* heavy downscaling or >=3-tap scaling needs up to 4 source lines
 * per destination line, otherwise 2 suffice */
1957 a.full = dfixed_const(2);
1958 b.full = dfixed_const(1);
1959 if ((wm->vsc.full > a.full) ||
1960 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
1962 ((wm->vsc.full >= a.full) && wm->interlaced))
1963 max_src_lines_per_dst_line = 4;
1965 max_src_lines_per_dst_line = 2;
/* line buffer fill rate: min of per-head share of available bandwidth
 * and disp_clk * bytes_per_pixel */
1967 a.full = dfixed_const(available_bandwidth);
1968 b.full = dfixed_const(wm->num_heads);
1969 a.full = dfixed_div(a, b);
1971 b.full = dfixed_const(1000);
1972 c.full = dfixed_const(wm->disp_clk);
1973 b.full = dfixed_div(c, b);
1974 c.full = dfixed_const(wm->bytes_per_pixel);
1975 b.full = dfixed_mul(b, c);
1977 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
/* time to fill the worst-case number of source lines at that rate */
1979 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
1980 b.full = dfixed_const(1000);
1981 c.full = dfixed_const(lb_fill_bw);
1982 b.full = dfixed_div(c, b);
1983 a.full = dfixed_div(a, b);
1984 line_fill_time = dfixed_trunc(a);
1986 if (line_fill_time < wm->active_time)
1989 return latency + (line_fill_time - wm->active_time);
/* True if this head's average bandwidth fits within its share of the
 * DRAM bandwidth reserved for display.
 */
1993 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1995 if (evergreen_average_bandwidth(wm) <=
1996 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
/* True if this head's average bandwidth fits within its share of the
 * overall available bandwidth.
 */
2002 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2004 if (evergreen_average_bandwidth(wm) <=
2005 (evergreen_available_bandwidth(wm) / wm->num_heads))
/* True if the line buffer can hide the computed latency: compare the
 * latency watermark against the time covered by the latency-tolerant
 * lines plus the blank period.
 */
2011 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2013 u32 lb_partitions = wm->lb_size / wm->src_width;
2014 u32 line_time = wm->active_time + wm->blank_time;
2015 u32 latency_tolerant_lines;
/* with vertical scaling (vsc > 1) or too few lb partitions, only one
 * line of tolerance is available; otherwise two */
2019 a.full = dfixed_const(1);
2020 if (wm->vsc.full > a.full)
2021 latency_tolerant_lines = 1;
2023 if (lb_partitions <= (wm->vtaps + 1))
2024 latency_tolerant_lines = 1;
2026 latency_tolerant_lines = 2;
2029 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2031 if (evergreen_latency_watermark(wm) <= latency_hiding)
/* Program the display priority / latency watermarks for one CRTC.
 * Fills an evergreen_wm_params struct from the current mode and clocks,
 * derives latency watermarks A (high clocks) and B (low clocks), computes
 * the priority marks, and writes them to the per-pipe arbitration and
 * per-crtc priority registers.
 * NOTE(review): this chunk appears to have lines dropped by extraction
 * (e.g. variable declarations, else branches, closing braces) — the leading
 * numbers on each line are original line-number artifacts. */
2037 static void evergreen_program_watermarks(struct radeon_device *rdev,
2038 struct radeon_crtc *radeon_crtc,
2039 u32 lb_size, u32 num_heads)
2041 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2042 struct evergreen_wm_params wm;
2045 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2046 u32 priority_a_mark = 0, priority_b_mark = 0;
/* Priority counters default to OFF; may be forced ALWAYS_ON below. */
2047 u32 priority_a_cnt = PRIORITY_OFF;
2048 u32 priority_b_cnt = PRIORITY_OFF;
/* Each display pipe's arbitration registers are 16 bytes apart. */
2049 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2050 u32 tmp, arb_control3;
2053 if (radeon_crtc->base.enabled && num_heads && mode) {
/* mode->clock is in kHz, so pixel_period comes out in ns. */
2054 pixel_period = 1000000 / (u32)mode->clock;
2055 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
/* current_mclk/current_sclk are in 10 kHz units; scale to kHz. */
2059 wm.yclk = rdev->pm.current_mclk * 10;
2060 wm.sclk = rdev->pm.current_sclk * 10;
2061 wm.disp_clk = mode->clock;
2062 wm.src_width = mode->crtc_hdisplay;
2063 wm.active_time = mode->crtc_hdisplay * pixel_period;
2064 wm.blank_time = line_time - wm.active_time;
2065 wm.interlaced = false;
2066 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2067 wm.interlaced = true;
2068 wm.vsc = radeon_crtc->vsc;
2070 if (radeon_crtc->rmx_type != RMX_OFF)
2072 wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
2073 wm.lb_size = lb_size;
2074 wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
2075 wm.num_heads = num_heads;
2077 /* set for high clocks */
2078 latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
2079 /* set for low clocks */
2080 /* wm.yclk = low clk; wm.sclk = low clk */
2081 latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);
2083 /* possibly force display priority to high */
2084 /* should really do this at mode validation time... */
2085 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
2086 !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
2087 !evergreen_check_latency_hiding(&wm) ||
2088 (rdev->disp_priority == 2)) {
2089 DRM_DEBUG_KMS("force priority to high\n");
2090 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2091 priority_b_cnt |= PRIORITY_ALWAYS_ON;
/* priority_a_mark = (clock/1000 * watermark_a * hsc) / 1000 / 16,
 * computed in 20.12 fixed point via the dfixed helpers. */
2094 a.full = dfixed_const(1000);
2095 b.full = dfixed_const(mode->clock);
2096 b.full = dfixed_div(b, a);
2097 c.full = dfixed_const(latency_watermark_a);
2098 c.full = dfixed_mul(c, b);
2099 c.full = dfixed_mul(c, radeon_crtc->hsc);
2100 c.full = dfixed_div(c, a);
2101 a.full = dfixed_const(16);
2102 c.full = dfixed_div(c, a);
2103 priority_a_mark = dfixed_trunc(c);
2104 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
/* Same computation for the B (low clock) watermark. */
2106 a.full = dfixed_const(1000);
2107 b.full = dfixed_const(mode->clock);
2108 b.full = dfixed_div(b, a);
2109 c.full = dfixed_const(latency_watermark_b);
2110 c.full = dfixed_mul(c, b);
2111 c.full = dfixed_mul(c, radeon_crtc->hsc);
2112 c.full = dfixed_div(c, a);
2113 a.full = dfixed_const(16);
2114 c.full = dfixed_div(c, a);
2115 priority_b_mark = dfixed_trunc(c);
2116 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
/* Select watermark set 1, program watermark A, then select set 2 and
 * program watermark B; finally restore the original selection. */
2120 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2122 tmp &= ~LATENCY_WATERMARK_MASK(3);
2123 tmp |= LATENCY_WATERMARK_MASK(1);
2124 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2125 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2126 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2127 LATENCY_HIGH_WATERMARK(line_time)));
2129 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2130 tmp &= ~LATENCY_WATERMARK_MASK(3);
2131 tmp |= LATENCY_WATERMARK_MASK(2);
2132 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2133 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2134 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2135 LATENCY_HIGH_WATERMARK(line_time)));
2136 /* restore original selection */
2137 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2139 /* write the priority marks */
2140 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2141 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2146 * evergreen_bandwidth_update - update display watermarks callback.
2148 * @rdev: radeon_device pointer
2150 * Update the display watermarks based on the requested mode(s)
/* Recompute and program display watermarks for all CRTCs.
 * Counts enabled heads, then walks CRTC pairs (each pair shares a line
 * buffer) and programs each CRTC's watermarks with its adjusted LB share. */
2153 void evergreen_bandwidth_update(struct radeon_device *rdev)
2155 struct drm_display_mode *mode0 = NULL;
2156 struct drm_display_mode *mode1 = NULL;
2157 u32 num_heads = 0, lb_size;
2160 radeon_update_display_priority(rdev);
/* First pass: count enabled CRTCs (body truncated by extraction —
 * presumably num_heads++ on the next line; confirm against upstream). */
2162 for (i = 0; i < rdev->num_crtc; i++) {
2163 if (rdev->mode_info.crtcs[i]->base.enabled)
/* Second pass: CRTCs i and i+1 share one line buffer, so the LB split
 * depends on both modes. */
2166 for (i = 0; i < rdev->num_crtc; i += 2) {
2167 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2168 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2169 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2170 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2171 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2172 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2177 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2179 * @rdev: radeon_device pointer
2181 * Wait for the MC (memory controller) to be idle.
2183 * Returns 0 if the MC is idle, -1 if not.
/* Poll SRBM_STATUS (MC busy bits 0x1F00) until the memory controller is
 * idle or rdev->usec_timeout iterations elapse.
 * Returns 0 on idle, -1 on timeout (per the kernel-doc above; the return
 * statements were dropped by extraction). */
2185 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2190 for (i = 0; i < rdev->usec_timeout; i++) {
2191 /* read MC_STATUS */
2192 tmp = RREG32(SRBM_STATUS) & 0x1F00;
/* Flush the PCIE GART TLB: flush HDP memory coherency, request a VM
 * context 0 flush, then poll the response register until the flush is
 * acknowledged or the timeout expires. */
2203 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2208 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2210 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2211 for (i = 0; i < rdev->usec_timeout; i++) {
2212 /* read MC_STATUS */
2213 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2214 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2216 DRM_ERROR("[drm] r600 flush TLB failed\n");
/* Enable the PCIE GART: pin the page table in VRAM, program the L2 cache
 * and L1 TLB controls (IGP parts use the FUS_* register set), set up VM
 * context 0 over the GTT aperture, and flush the TLB.
 * Returns 0 on success or a negative error code (error paths truncated
 * by extraction). */
2226 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2231 if (rdev->gart.robj == NULL) {
2232 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2235 r = radeon_gart_table_vram_pin(rdev);
2238 radeon_gart_restore(rdev);
2239 /* Setup L2 cache */
2240 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2241 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2242 EFFECTIVE_L2_QUEUE_SIZE(7));
2243 WREG32(VM_L2_CNTL2, 0);
2244 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2245 /* Setup TLB control */
2246 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2247 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2248 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2249 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
/* IGP (Fusion) parts expose the MD TLB controls at FUS_* offsets. */
2250 if (rdev->flags & RADEON_IS_IGP) {
2251 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2252 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2253 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2255 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2256 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2257 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
/* Only these discrete families have a fourth MD TLB bank. */
2258 if ((rdev->family == CHIP_JUNIPER) ||
2259 (rdev->family == CHIP_CYPRESS) ||
2260 (rdev->family == CHIP_HEMLOCK) ||
2261 (rdev->family == CHIP_BARTS))
2262 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2264 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2265 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2266 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2267 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* Map VM context 0 over the GTT range; addresses are in 4K pages. */
2268 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2269 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2270 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2271 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2272 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
/* Faulting accesses are redirected to the dummy page. */
2273 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2274 (u32)(rdev->dummy_page.addr >> 12));
2275 WREG32(VM_CONTEXT1_CNTL, 0);
2277 evergreen_pcie_gart_tlb_flush(rdev);
2278 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2279 (unsigned)(rdev->mc.gtt_size >> 20),
2280 (unsigned long long)rdev->gart.table_addr);
2281 rdev->gart.ready = true;
/* Disable the PCIE GART: turn off both VM contexts, drop the L2 cache and
 * L1 TLB enables (leaving queue sizes programmed), and unpin the page
 * table from VRAM. */
2285 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2289 /* Disable all tables */
2290 WREG32(VM_CONTEXT0_CNTL, 0);
2291 WREG32(VM_CONTEXT1_CNTL, 0);
2293 /* Setup L2 cache */
2294 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2295 EFFECTIVE_L2_QUEUE_SIZE(7));
2296 WREG32(VM_L2_CNTL2, 0);
2297 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2298 /* Setup TLB control */
2299 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2300 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2301 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2302 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2303 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2304 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2305 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2306 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2307 radeon_gart_table_vram_unpin(rdev);
/* Tear down the PCIE GART: disable it, free the VRAM page table, and
 * release the common GART state. */
2310 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2312 evergreen_pcie_gart_disable(rdev);
2313 radeon_gart_table_vram_free(rdev);
2314 radeon_gart_fini(rdev);
/* Configure the MC for AGP operation: enable L2 cache and L1 TLBs with the
 * same settings as the GART path, but leave both VM contexts disabled so
 * no page-table translation takes place. */
2318 static void evergreen_agp_enable(struct radeon_device *rdev)
2322 /* Setup L2 cache */
2323 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2324 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2325 EFFECTIVE_L2_QUEUE_SIZE(7));
2326 WREG32(VM_L2_CNTL2, 0);
2327 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2328 /* Setup TLB control */
2329 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2330 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2331 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2332 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2333 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2334 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2335 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2336 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2337 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2338 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2339 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* No VM translation in AGP mode. */
2340 WREG32(VM_CONTEXT0_CNTL, 0);
2341 WREG32(VM_CONTEXT1_CNTL, 0);
/* Quiesce the display side before reprogramming the MC.
 * Saves VGA state, blanks/disables every enabled CRTC (recording which
 * were enabled in *save for evergreen_mc_resume), blacks out the MC, and
 * locks the double-buffered update registers. */
2344 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2346 u32 crtc_enabled, tmp, frame_count, blackout;
2349 if (!ASIC_IS_NODCE(rdev)) {
2350 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2351 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2353 /* disable VGA render */
2354 WREG32(VGA_RENDER_CONTROL, 0);
2356 /* blank the display controllers */
2357 for (i = 0; i < rdev->num_crtc; i++) {
2358 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2360 save->crtc_enabled[i] = true;
/* DCE6 parts blank via BLANK_CONTROL; older parts disable read
 * requests via CRTC_CONTROL instead. */
2361 if (ASIC_IS_DCE6(rdev)) {
2362 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2363 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2364 radeon_wait_for_vblank(rdev, i);
2365 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2366 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2367 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2370 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2371 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2372 radeon_wait_for_vblank(rdev, i);
2373 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2374 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2375 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2376 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2379 /* wait for the next frame */
2380 frame_count = radeon_get_vblank_counter(rdev, i);
2381 for (j = 0; j < rdev->usec_timeout; j++) {
2382 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2387 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2388 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2389 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2390 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2391 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2392 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2393 save->crtc_enabled[i] = false;
2396 save->crtc_enabled[i] = false;
2400 radeon_mc_wait_for_idle(rdev);
/* Black out the MC unless it is already blacked out. */
2402 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2403 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2404 /* Block CPU access */
2405 WREG32(BIF_FB_EN, 0);
2406 /* blackout the MC */
2407 blackout &= ~BLACKOUT_MODE_MASK;
2408 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2410 /* wait for the MC to settle */
2413 /* lock double buffered regs */
2414 for (i = 0; i < rdev->num_crtc; i++) {
2415 if (save->crtc_enabled[i]) {
2416 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2417 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2418 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2419 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2421 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2424 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* Undo evergreen_mc_stop(): repoint all CRTC scanout bases at the (possibly
 * relocated) VRAM start, unlock the double-buffered registers, lift the MC
 * blackout, re-enable the CRTCs that were enabled, and restore VGA state. */
2430 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2432 u32 tmp, frame_count;
2435 /* update crtc base addresses */
2436 for (i = 0; i < rdev->num_crtc; i++) {
2437 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2438 upper_32_bits(rdev->mc.vram_start));
2439 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2440 upper_32_bits(rdev->mc.vram_start));
2441 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2442 (u32)rdev->mc.vram_start);
2443 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2444 (u32)rdev->mc.vram_start);
2447 if (!ASIC_IS_NODCE(rdev)) {
2448 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2449 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2452 /* unlock regs and wait for update */
2453 for (i = 0; i < rdev->num_crtc; i++) {
2454 if (save->crtc_enabled[i]) {
2455 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2456 if ((tmp & 0x3) != 0) {
2458 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2460 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2461 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2462 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2463 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2465 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2468 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* Wait for the pending surface update to latch. */
2470 for (j = 0; j < rdev->usec_timeout; j++) {
2471 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2472 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2479 /* unblackout the MC */
2480 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2481 tmp &= ~BLACKOUT_MODE_MASK;
2482 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2483 /* allow CPU access */
2484 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2486 for (i = 0; i < rdev->num_crtc; i++) {
2487 if (save->crtc_enabled[i]) {
/* Mirror of the blanking logic in evergreen_mc_stop(): DCE6 uses
 * BLANK_CONTROL, older parts clear READ_REQUEST_DISABLE. */
2488 if (ASIC_IS_DCE6(rdev)) {
2489 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2490 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2491 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2492 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2493 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2495 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2496 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2497 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2498 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2499 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2501 /* wait for the next frame */
2502 frame_count = radeon_get_vblank_counter(rdev, i);
2503 for (j = 0; j < rdev->usec_timeout; j++) {
2504 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2510 if (!ASIC_IS_NODCE(rdev)) {
2511 /* Unlock vga access */
2512 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2514 WREG32(VGA_RENDER_CONTROL, save->vga_render_control)
/* Program the memory controller's address map: HDP registers, system
 * aperture (VRAM/AGP ordering dependent), FB location, HDP non-surface
 * range, and AGP base/top/bot. Display is stopped around the update via
 * evergreen_mc_stop()/evergreen_mc_resume(). */
2518 void evergreen_mc_program(struct radeon_device *rdev)
2520 struct evergreen_mc_save save;
2524 /* Initialize HDP */
2525 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2526 WREG32((0x2c14 + j), 0x00000000);
2527 WREG32((0x2c18 + j), 0x00000000);
2528 WREG32((0x2c1c + j), 0x00000000);
2529 WREG32((0x2c20 + j), 0x00000000);
2530 WREG32((0x2c24 + j), 0x00000000);
2532 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2534 evergreen_mc_stop(rdev, &save);
2535 if (evergreen_mc_wait_for_idle(rdev)) {
2536 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2538 /* Lockout access through VGA aperture*/
2539 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2540 /* Update configuration */
/* System aperture must span both VRAM and AGP GTT; which range is lower
 * depends on how the apertures were laid out. Addresses in 4K pages. */
2541 if (rdev->flags & RADEON_IS_AGP) {
2542 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2543 /* VRAM before AGP */
2544 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2545 rdev->mc.vram_start >> 12);
2546 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2547 rdev->mc.gtt_end >> 12);
2549 /* VRAM after AGP */
2550 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2551 rdev->mc.gtt_start >> 12);
2552 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2553 rdev->mc.vram_end >> 12);
2556 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2557 rdev->mc.vram_start >> 12);
2558 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2559 rdev->mc.vram_end >> 12);
2561 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2562 /* llano/ontario only */
2563 if ((rdev->family == CHIP_PALM) ||
2564 (rdev->family == CHIP_SUMO) ||
2565 (rdev->family == CHIP_SUMO2)) {
2566 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2567 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2568 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2569 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
/* FB_LOCATION encodes start/end in 16M granularity. */
2571 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2572 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2573 WREG32(MC_VM_FB_LOCATION, tmp);
2574 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2575 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2576 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2577 if (rdev->flags & RADEON_IS_AGP) {
2578 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2579 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2580 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
/* No AGP: park the aperture at an empty range. */
2582 WREG32(MC_VM_AGP_BASE, 0);
2583 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2584 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2586 if (evergreen_mc_wait_for_idle(rdev)) {
2587 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2589 evergreen_mc_resume(rdev, &save);
2590 /* we need to own VRAM, so turn off the VGA renderer here
2591 * to stop it overwriting our objects */
2592 rv515_vga_render_disable(rdev);
/* Schedule an indirect buffer (IB) on the GFX ring: switch the CP to
 * DX10/11 mode, emit the next-rptr bookkeeping write (via the rptr save
 * register or the writeback buffer), then the INDIRECT_BUFFER packet
 * pointing at the IB. */
2598 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2600 struct radeon_ring *ring = &rdev->ring[ib->ring];
2603 /* set to DX10/11 mode */
2604 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2605 radeon_ring_write(ring, 1);
2607 if (ring->rptr_save_reg) {
/* +3 for this SET_CONFIG_REG packet, +4 for the IB packet below. */
2608 next_rptr = ring->wptr + 3 + 4;
2609 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2610 radeon_ring_write(ring, ((ring->rptr_save_reg -
2611 PACKET3_SET_CONFIG_REG_START) >> 2));
2612 radeon_ring_write(ring, next_rptr);
2613 } else if (rdev->wb.enabled) {
2614 next_rptr = ring->wptr + 5 + 4;
2615 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2616 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
/* (1 << 18) selects 32-bit data for the MEM_WRITE packet. */
2617 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2618 radeon_ring_write(ring, next_rptr);
2619 radeon_ring_write(ring, 0);
2622 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2623 radeon_ring_write(ring,
2627 (ib->gpu_addr & 0xFFFFFFFC));
2628 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2629 radeon_ring_write(ring, ib->length_dw);
/* Load the PFP and ME command-processor microcode into their instruction
 * RAMs. Requires both firmware images to have been fetched already.
 * Returns 0 on success (return paths truncated by extraction). */
2633 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2635 const __be32 *fw_data;
2638 if (!rdev->me_fw || !rdev->pfp_fw)
2646 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
/* Firmware words are big-endian; convert as we stream them in. */
2648 fw_data = (const __be32 *)rdev->pfp_fw->data;
2649 WREG32(CP_PFP_UCODE_ADDR, 0);
2650 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2651 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2652 WREG32(CP_PFP_UCODE_ADDR, 0);
2654 fw_data = (const __be32 *)rdev->me_fw->data;
2655 WREG32(CP_ME_RAM_WADDR, 0);
2656 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2657 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
/* Reset read/write addresses so execution starts at word 0. */
2659 WREG32(CP_PFP_UCODE_ADDR, 0);
2660 WREG32(CP_ME_RAM_WADDR, 0);
2661 WREG32(CP_ME_RAM_RADDR, 0);
/* Start the command processor: emit ME_INITIALIZE, un-halt the CP, then
 * emit the default ("golden") clear-context state plus a few fixed
 * register writes. Returns 0 on success, negative on ring-lock failure. */
2665 static int evergreen_cp_start(struct radeon_device *rdev)
2667 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2671 r = radeon_ring_lock(rdev, ring, 7);
2673 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2676 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2677 radeon_ring_write(ring, 0x1);
2678 radeon_ring_write(ring, 0x0);
2679 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2680 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2681 radeon_ring_write(ring, 0);
2682 radeon_ring_write(ring, 0);
2683 radeon_ring_unlock_commit(rdev, ring);
2686 WREG32(CP_ME_CNTL, cp_me);
/* +19 dwords of fixed packets around the default state below. */
2688 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2690 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2694 /* setup clear context state */
2695 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2696 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2698 for (i = 0; i < evergreen_default_size; i++)
2699 radeon_ring_write(ring, evergreen_default_state[i]);
2701 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2702 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2704 /* set clear context state */
2705 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2706 radeon_ring_write(ring, 0);
2708 /* SQ_VTX_BASE_VTX_LOC */
2709 radeon_ring_write(ring, 0xc0026f00);
2710 radeon_ring_write(ring, 0x00000000);
2711 radeon_ring_write(ring, 0x00000000);
2712 radeon_ring_write(ring, 0x00000000);
/* Raw PM4 packet at config-reg offset 0xbc4 (SQ constants). */
2715 radeon_ring_write(ring, 0xc0036f00);
2716 radeon_ring_write(ring, 0x00000bc4);
2717 radeon_ring_write(ring, 0xffffffff);
2718 radeon_ring_write(ring, 0xffffffff);
2719 radeon_ring_write(ring, 0xffffffff);
2721 radeon_ring_write(ring, 0xc0026900);
2722 radeon_ring_write(ring, 0x00000316);
2723 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2724 radeon_ring_write(ring, 0x00000010); /* */
2726 radeon_ring_unlock_commit(rdev, ring);
/* Bring the CP ring buffer up: soft-reset the CP block, program the ring
 * buffer size/pointers, configure the rptr writeback address and scratch
 * registers, set the ring base, then start the CP and run a ring test. */
2731 static int evergreen_cp_resume(struct radeon_device *rdev)
2733 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2738 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2739 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
/* Read back to post the reset, then release it. */
2745 RREG32(GRBM_SOFT_RESET);
2747 WREG32(GRBM_SOFT_RESET, 0);
2748 RREG32(GRBM_SOFT_RESET);
2750 /* Set ring buffer size */
2751 rb_bufsz = drm_order(ring->ring_size / 8);
2752 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2754 tmp |= BUF_SWAP_32BIT;
2756 WREG32(CP_RB_CNTL, tmp);
2757 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2758 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2760 /* Set the write pointer delay */
2761 WREG32(CP_RB_WPTR_DELAY, 0);
2763 /* Initialize the ring buffer's read and write pointers */
2764 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2765 WREG32(CP_RB_RPTR_WR, 0);
2767 WREG32(CP_RB_WPTR, ring->wptr);
2769 /* set the wb address whether it's enabled or not */
2770 WREG32(CP_RB_RPTR_ADDR,
2771 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2772 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2773 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2775 if (rdev->wb.enabled)
2776 WREG32(SCRATCH_UMSK, 0xff);
/* No writeback: keep the CP from updating rptr in memory. */
2778 tmp |= RB_NO_UPDATE;
2779 WREG32(SCRATCH_UMSK, 0);
2783 WREG32(CP_RB_CNTL, tmp);
2785 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2786 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2788 ring->rptr = RREG32(CP_RB_RPTR);
2790 evergreen_cp_start(rdev);
2792 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2794 ring->ready = false;
2803 static void evergreen_gpu_init(struct radeon_device *rdev)
2806 u32 mc_shared_chmap, mc_arb_ramcfg;
2810 u32 sq_lds_resource_mgmt;
2811 u32 sq_gpr_resource_mgmt_1;
2812 u32 sq_gpr_resource_mgmt_2;
2813 u32 sq_gpr_resource_mgmt_3;
2814 u32 sq_thread_resource_mgmt;
2815 u32 sq_thread_resource_mgmt_2;
2816 u32 sq_stack_resource_mgmt_1;
2817 u32 sq_stack_resource_mgmt_2;
2818 u32 sq_stack_resource_mgmt_3;
2819 u32 vgt_cache_invalidation;
2820 u32 hdp_host_path_cntl, tmp;
2821 u32 disabled_rb_mask;
2822 int i, j, num_shader_engines, ps_thread_count;
2824 switch (rdev->family) {
2827 rdev->config.evergreen.num_ses = 2;
2828 rdev->config.evergreen.max_pipes = 4;
2829 rdev->config.evergreen.max_tile_pipes = 8;
2830 rdev->config.evergreen.max_simds = 10;
2831 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2832 rdev->config.evergreen.max_gprs = 256;
2833 rdev->config.evergreen.max_threads = 248;
2834 rdev->config.evergreen.max_gs_threads = 32;
2835 rdev->config.evergreen.max_stack_entries = 512;
2836 rdev->config.evergreen.sx_num_of_sets = 4;
2837 rdev->config.evergreen.sx_max_export_size = 256;
2838 rdev->config.evergreen.sx_max_export_pos_size = 64;
2839 rdev->config.evergreen.sx_max_export_smx_size = 192;
2840 rdev->config.evergreen.max_hw_contexts = 8;
2841 rdev->config.evergreen.sq_num_cf_insts = 2;
2843 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2844 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2845 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2846 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2849 rdev->config.evergreen.num_ses = 1;
2850 rdev->config.evergreen.max_pipes = 4;
2851 rdev->config.evergreen.max_tile_pipes = 4;
2852 rdev->config.evergreen.max_simds = 10;
2853 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2854 rdev->config.evergreen.max_gprs = 256;
2855 rdev->config.evergreen.max_threads = 248;
2856 rdev->config.evergreen.max_gs_threads = 32;
2857 rdev->config.evergreen.max_stack_entries = 512;
2858 rdev->config.evergreen.sx_num_of_sets = 4;
2859 rdev->config.evergreen.sx_max_export_size = 256;
2860 rdev->config.evergreen.sx_max_export_pos_size = 64;
2861 rdev->config.evergreen.sx_max_export_smx_size = 192;
2862 rdev->config.evergreen.max_hw_contexts = 8;
2863 rdev->config.evergreen.sq_num_cf_insts = 2;
2865 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2866 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2867 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2868 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
2871 rdev->config.evergreen.num_ses = 1;
2872 rdev->config.evergreen.max_pipes = 4;
2873 rdev->config.evergreen.max_tile_pipes = 4;
2874 rdev->config.evergreen.max_simds = 5;
2875 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2876 rdev->config.evergreen.max_gprs = 256;
2877 rdev->config.evergreen.max_threads = 248;
2878 rdev->config.evergreen.max_gs_threads = 32;
2879 rdev->config.evergreen.max_stack_entries = 256;
2880 rdev->config.evergreen.sx_num_of_sets = 4;
2881 rdev->config.evergreen.sx_max_export_size = 256;
2882 rdev->config.evergreen.sx_max_export_pos_size = 64;
2883 rdev->config.evergreen.sx_max_export_smx_size = 192;
2884 rdev->config.evergreen.max_hw_contexts = 8;
2885 rdev->config.evergreen.sq_num_cf_insts = 2;
2887 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2888 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2889 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2890 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
2894 rdev->config.evergreen.num_ses = 1;
2895 rdev->config.evergreen.max_pipes = 2;
2896 rdev->config.evergreen.max_tile_pipes = 2;
2897 rdev->config.evergreen.max_simds = 2;
2898 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2899 rdev->config.evergreen.max_gprs = 256;
2900 rdev->config.evergreen.max_threads = 192;
2901 rdev->config.evergreen.max_gs_threads = 16;
2902 rdev->config.evergreen.max_stack_entries = 256;
2903 rdev->config.evergreen.sx_num_of_sets = 4;
2904 rdev->config.evergreen.sx_max_export_size = 128;
2905 rdev->config.evergreen.sx_max_export_pos_size = 32;
2906 rdev->config.evergreen.sx_max_export_smx_size = 96;
2907 rdev->config.evergreen.max_hw_contexts = 4;
2908 rdev->config.evergreen.sq_num_cf_insts = 1;
2910 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2911 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2912 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2913 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2916 rdev->config.evergreen.num_ses = 1;
2917 rdev->config.evergreen.max_pipes = 2;
2918 rdev->config.evergreen.max_tile_pipes = 2;
2919 rdev->config.evergreen.max_simds = 2;
2920 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2921 rdev->config.evergreen.max_gprs = 256;
2922 rdev->config.evergreen.max_threads = 192;
2923 rdev->config.evergreen.max_gs_threads = 16;
2924 rdev->config.evergreen.max_stack_entries = 256;
2925 rdev->config.evergreen.sx_num_of_sets = 4;
2926 rdev->config.evergreen.sx_max_export_size = 128;
2927 rdev->config.evergreen.sx_max_export_pos_size = 32;
2928 rdev->config.evergreen.sx_max_export_smx_size = 96;
2929 rdev->config.evergreen.max_hw_contexts = 4;
2930 rdev->config.evergreen.sq_num_cf_insts = 1;
2932 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2933 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2934 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2935 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2938 rdev->config.evergreen.num_ses = 1;
2939 rdev->config.evergreen.max_pipes = 4;
2940 rdev->config.evergreen.max_tile_pipes = 4;
2941 if (rdev->ddev->pci_device == 0x9648)
2942 rdev->config.evergreen.max_simds = 3;
2943 else if ((rdev->ddev->pci_device == 0x9647) ||
2944 (rdev->ddev->pci_device == 0x964a))
2945 rdev->config.evergreen.max_simds = 4;
2947 rdev->config.evergreen.max_simds = 5;
2948 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2949 rdev->config.evergreen.max_gprs = 256;
2950 rdev->config.evergreen.max_threads = 248;
2951 rdev->config.evergreen.max_gs_threads = 32;
2952 rdev->config.evergreen.max_stack_entries = 256;
2953 rdev->config.evergreen.sx_num_of_sets = 4;
2954 rdev->config.evergreen.sx_max_export_size = 256;
2955 rdev->config.evergreen.sx_max_export_pos_size = 64;
2956 rdev->config.evergreen.sx_max_export_smx_size = 192;
2957 rdev->config.evergreen.max_hw_contexts = 8;
2958 rdev->config.evergreen.sq_num_cf_insts = 2;
2960 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2961 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2962 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2963 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
2966 rdev->config.evergreen.num_ses = 1;
2967 rdev->config.evergreen.max_pipes = 4;
2968 rdev->config.evergreen.max_tile_pipes = 4;
2969 rdev->config.evergreen.max_simds = 2;
2970 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2971 rdev->config.evergreen.max_gprs = 256;
2972 rdev->config.evergreen.max_threads = 248;
2973 rdev->config.evergreen.max_gs_threads = 32;
2974 rdev->config.evergreen.max_stack_entries = 512;
2975 rdev->config.evergreen.sx_num_of_sets = 4;
2976 rdev->config.evergreen.sx_max_export_size = 256;
2977 rdev->config.evergreen.sx_max_export_pos_size = 64;
2978 rdev->config.evergreen.sx_max_export_smx_size = 192;
2979 rdev->config.evergreen.max_hw_contexts = 8;
2980 rdev->config.evergreen.sq_num_cf_insts = 2;
2982 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2983 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2984 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2985 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
2988 rdev->config.evergreen.num_ses = 2;
2989 rdev->config.evergreen.max_pipes = 4;
2990 rdev->config.evergreen.max_tile_pipes = 8;
2991 rdev->config.evergreen.max_simds = 7;
2992 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2993 rdev->config.evergreen.max_gprs = 256;
2994 rdev->config.evergreen.max_threads = 248;
2995 rdev->config.evergreen.max_gs_threads = 32;
2996 rdev->config.evergreen.max_stack_entries = 512;
2997 rdev->config.evergreen.sx_num_of_sets = 4;
2998 rdev->config.evergreen.sx_max_export_size = 256;
2999 rdev->config.evergreen.sx_max_export_pos_size = 64;
3000 rdev->config.evergreen.sx_max_export_smx_size = 192;
3001 rdev->config.evergreen.max_hw_contexts = 8;
3002 rdev->config.evergreen.sq_num_cf_insts = 2;
3004 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3005 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3006 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3007 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3010 rdev->config.evergreen.num_ses = 1;
3011 rdev->config.evergreen.max_pipes = 4;
3012 rdev->config.evergreen.max_tile_pipes = 4;
3013 rdev->config.evergreen.max_simds = 6;
3014 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3015 rdev->config.evergreen.max_gprs = 256;
3016 rdev->config.evergreen.max_threads = 248;
3017 rdev->config.evergreen.max_gs_threads = 32;
3018 rdev->config.evergreen.max_stack_entries = 256;
3019 rdev->config.evergreen.sx_num_of_sets = 4;
3020 rdev->config.evergreen.sx_max_export_size = 256;
3021 rdev->config.evergreen.sx_max_export_pos_size = 64;
3022 rdev->config.evergreen.sx_max_export_smx_size = 192;
3023 rdev->config.evergreen.max_hw_contexts = 8;
3024 rdev->config.evergreen.sq_num_cf_insts = 2;
3026 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3027 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3028 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3029 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3032 rdev->config.evergreen.num_ses = 1;
3033 rdev->config.evergreen.max_pipes = 2;
3034 rdev->config.evergreen.max_tile_pipes = 2;
3035 rdev->config.evergreen.max_simds = 2;
3036 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3037 rdev->config.evergreen.max_gprs = 256;
3038 rdev->config.evergreen.max_threads = 192;
3039 rdev->config.evergreen.max_gs_threads = 16;
3040 rdev->config.evergreen.max_stack_entries = 256;
3041 rdev->config.evergreen.sx_num_of_sets = 4;
3042 rdev->config.evergreen.sx_max_export_size = 128;
3043 rdev->config.evergreen.sx_max_export_pos_size = 32;
3044 rdev->config.evergreen.sx_max_export_smx_size = 96;
3045 rdev->config.evergreen.max_hw_contexts = 4;
3046 rdev->config.evergreen.sq_num_cf_insts = 1;
3048 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3049 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3050 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3051 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3055 /* Initialize HDP */
3056 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3057 WREG32((0x2c14 + j), 0x00000000);
3058 WREG32((0x2c18 + j), 0x00000000);
3059 WREG32((0x2c1c + j), 0x00000000);
3060 WREG32((0x2c20 + j), 0x00000000);
3061 WREG32((0x2c24 + j), 0x00000000);
3064 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3066 evergreen_fix_pci_max_read_req_size(rdev);
3068 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3069 if ((rdev->family == CHIP_PALM) ||
3070 (rdev->family == CHIP_SUMO) ||
3071 (rdev->family == CHIP_SUMO2))
3072 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3074 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3076 /* setup tiling info dword. gb_addr_config is not adequate since it does
3077 * not have bank info, so create a custom tiling dword.
3078 * bits 3:0 num_pipes
3079 * bits 7:4 num_banks
3080 * bits 11:8 group_size
3081 * bits 15:12 row_size
3083 rdev->config.evergreen.tile_config = 0;
3084 switch (rdev->config.evergreen.max_tile_pipes) {
3087 rdev->config.evergreen.tile_config |= (0 << 0);
3090 rdev->config.evergreen.tile_config |= (1 << 0);
3093 rdev->config.evergreen.tile_config |= (2 << 0);
3096 rdev->config.evergreen.tile_config |= (3 << 0);
3099 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3100 if (rdev->flags & RADEON_IS_IGP)
3101 rdev->config.evergreen.tile_config |= 1 << 4;
3103 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3104 case 0: /* four banks */
3105 rdev->config.evergreen.tile_config |= 0 << 4;
3107 case 1: /* eight banks */
3108 rdev->config.evergreen.tile_config |= 1 << 4;
3110 case 2: /* sixteen banks */
3112 rdev->config.evergreen.tile_config |= 2 << 4;
3116 rdev->config.evergreen.tile_config |= 0 << 8;
3117 rdev->config.evergreen.tile_config |=
3118 ((gb_addr_config & 0x30000000) >> 28) << 12;
3120 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3122 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3126 WREG32(RCU_IND_INDEX, 0x204);
3127 efuse_straps_4 = RREG32(RCU_IND_DATA);
3128 WREG32(RCU_IND_INDEX, 0x203);
3129 efuse_straps_3 = RREG32(RCU_IND_DATA);
3130 tmp = (((efuse_straps_4 & 0xf) << 4) |
3131 ((efuse_straps_3 & 0xf0000000) >> 28));
3134 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3135 u32 rb_disable_bitmap;
3137 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3138 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3139 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3141 tmp |= rb_disable_bitmap;
3144 /* enabled rb are just the one not disabled :) */
3145 disabled_rb_mask = tmp;
3147 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3149 /* if all the backends are disabled, fix it up here */
3150 if ((disabled_rb_mask & tmp) == tmp) {
3151 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3152 disabled_rb_mask &= ~(1 << i);
3155 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3156 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3158 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3159 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3160 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3161 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3162 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3163 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3164 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3166 if ((rdev->config.evergreen.max_backends == 1) &&
3167 (rdev->flags & RADEON_IS_IGP)) {
3168 if ((disabled_rb_mask & 3) == 1) {
3169 /* RB0 disabled, RB1 enabled */
3172 /* RB1 disabled, RB0 enabled */
3176 tmp = gb_addr_config & NUM_PIPES_MASK;
3177 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3178 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3180 WREG32(GB_BACKEND_MAP, tmp);
3182 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3183 WREG32(CGTS_TCC_DISABLE, 0);
3184 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3185 WREG32(CGTS_USER_TCC_DISABLE, 0);
3187 /* set HW defaults for 3D engine */
3188 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3189 ROQ_IB2_START(0x2b)));
3191 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3193 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3198 sx_debug_1 = RREG32(SX_DEBUG_1);
3199 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3200 WREG32(SX_DEBUG_1, sx_debug_1);
3203 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3204 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3205 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3206 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3208 if (rdev->family <= CHIP_SUMO2)
3209 WREG32(SMX_SAR_CTL0, 0x00010000);
3211 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3212 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3213 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3215 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3216 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3217 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3219 WREG32(VGT_NUM_INSTANCES, 1);
3220 WREG32(SPI_CONFIG_CNTL, 0);
3221 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3222 WREG32(CP_PERFMON_CNTL, 0);
3224 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3225 FETCH_FIFO_HIWATER(0x4) |
3226 DONE_FIFO_HIWATER(0xe0) |
3227 ALU_UPDATE_FIFO_HIWATER(0x8)));
3229 sq_config = RREG32(SQ_CONFIG);
3230 sq_config &= ~(PS_PRIO(3) |
3234 sq_config |= (VC_ENABLE |
3241 switch (rdev->family) {
3247 /* no vertex cache */
3248 sq_config &= ~VC_ENABLE;
3254 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3256 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3257 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3258 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3259 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3260 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3261 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3262 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3264 switch (rdev->family) {
3269 ps_thread_count = 96;
3272 ps_thread_count = 128;
3276 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3277 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3278 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3279 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3280 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3281 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3283 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3284 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3285 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3286 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3287 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3288 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3290 WREG32(SQ_CONFIG, sq_config);
3291 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3292 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3293 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3294 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3295 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3296 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3297 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3298 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3299 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3300 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3302 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3303 FORCE_EOV_MAX_REZ_CNT(255)));
3305 switch (rdev->family) {
3311 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3314 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3317 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3318 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3320 WREG32(VGT_GS_VERTEX_REUSE, 16);
3321 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3322 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3324 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3325 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3327 WREG32(CB_PERF_CTR0_SEL_0, 0);
3328 WREG32(CB_PERF_CTR0_SEL_1, 0);
3329 WREG32(CB_PERF_CTR1_SEL_0, 0);
3330 WREG32(CB_PERF_CTR1_SEL_1, 0);
3331 WREG32(CB_PERF_CTR2_SEL_0, 0);
3332 WREG32(CB_PERF_CTR2_SEL_1, 0);
3333 WREG32(CB_PERF_CTR3_SEL_0, 0);
3334 WREG32(CB_PERF_CTR3_SEL_1, 0);
3336 /* clear render buffer base addresses */
3337 WREG32(CB_COLOR0_BASE, 0);
3338 WREG32(CB_COLOR1_BASE, 0);
3339 WREG32(CB_COLOR2_BASE, 0);
3340 WREG32(CB_COLOR3_BASE, 0);
3341 WREG32(CB_COLOR4_BASE, 0);
3342 WREG32(CB_COLOR5_BASE, 0);
3343 WREG32(CB_COLOR6_BASE, 0);
3344 WREG32(CB_COLOR7_BASE, 0);
3345 WREG32(CB_COLOR8_BASE, 0);
3346 WREG32(CB_COLOR9_BASE, 0);
3347 WREG32(CB_COLOR10_BASE, 0);
3348 WREG32(CB_COLOR11_BASE, 0);
3350 /* set the shader const cache sizes to 0 */
3351 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3353 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3356 tmp = RREG32(HDP_MISC_CNTL);
3357 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3358 WREG32(HDP_MISC_CNTL, tmp);
3360 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3361 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3363 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
/*
 * evergreen_mc_init - read the memory-controller configuration and set up
 * the driver's view of VRAM: DDR flag, channel width, PCI aperture, and
 * total/visible VRAM size, then place VRAM/GTT in the GPU address space.
 *
 * NOTE(review): this view of the file has interior lines elided (the stray
 * numbers embedded in each line are original line numbers); some braces,
 * else-branches and switch cases live on the elided lines — confirm against
 * the full source before modifying.
 */
3369 int evergreen_mc_init(struct radeon_device *rdev)
3372 int chansize, numchan;
3374 /* Get VRAM information */
3375 rdev->mc.vram_is_ddr = true;
/* Fusion APUs (Palm/Sumo/Sumo2) expose RAMCFG at the FUS_ offset
 * instead of the discrete-GPU register. */
3376 if ((rdev->family == CHIP_PALM) ||
3377 (rdev->family == CHIP_SUMO) ||
3378 (rdev->family == CHIP_SUMO2))
3379 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3381 tmp = RREG32(MC_ARB_RAMCFG);
/* Per-channel width is derived from the RAMCFG chansize fields. */
3382 if (tmp & CHANSIZE_OVERRIDE) {
3384 } else if (tmp & CHANSIZE_MASK) {
/* Channel count comes from MC_SHARED_CHMAP (cases elided in this view). */
3389 tmp = RREG32(MC_SHARED_CHMAP);
3390 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3405 rdev->mc.vram_width = numchan * chansize;
3406 /* Could aper size report 0 ? */
3407 rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
3408 rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
3409 /* Setup GPU memory space */
3410 if ((rdev->family == CHIP_PALM) ||
3411 (rdev->family == CHIP_SUMO) ||
3412 (rdev->family == CHIP_SUMO2)) {
3413 /* size in bytes on fusion */
3414 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3415 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3417 /* size in MB on evergreen/cayman/tn */
3418 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3419 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
/* Everything reachable through the PCI aperture is CPU-visible. */
3421 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3422 r700_vram_gtt_location(rdev, &rdev->mc);
3423 radeon_update_bandwidth_info(rdev);
/*
 * evergreen_print_gpu_status_regs - dump the GRBM/SRBM/CP/DMA status
 * registers to the kernel log.  Called around soft reset for diagnostics;
 * has no side effects beyond the register reads and log output.
 */
3428 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3430 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
3431 RREG32(GRBM_STATUS));
3432 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
3433 RREG32(GRBM_STATUS_SE0));
3434 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
3435 RREG32(GRBM_STATUS_SE1));
3436 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
3437 RREG32(SRBM_STATUS));
3438 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3439 RREG32(SRBM_STATUS2));
3440 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3441 RREG32(CP_STALLED_STAT1));
3442 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3443 RREG32(CP_STALLED_STAT2));
3444 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3445 RREG32(CP_BUSY_STAT));
3446 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3448 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3449 RREG32(DMA_STATUS_REG));
/* Cayman and newer have a second async DMA engine at +0x800. */
3450 if (rdev->family >= CHIP_CAYMAN) {
3451 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3452 RREG32(DMA_STATUS_REG + 0x800));
/*
 * evergreen_is_display_hung - heuristically detect a hung display engine.
 *
 * For every enabled CRTC (EVERGREEN_CRTC_MASTER_EN set), sample the H/V
 * counter, then re-sample up to 10 times; a CRTC whose counter advances is
 * cleared from the hung set.  The return statement and the inter-sample
 * delay are on lines elided from this view — TODO confirm against full
 * source (presumably returns true iff crtc_hung is still non-zero).
 */
3456 bool evergreen_is_display_hung(struct radeon_device *rdev)
/* First pass: record a baseline H/V count for each active CRTC. */
3462 for (i = 0; i < rdev->num_crtc; i++) {
3463 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3464 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3465 crtc_hung |= (1 << i);
/* Re-sample: any CRTC whose counter moved is not hung. */
3469 for (j = 0; j < 10; j++) {
3470 for (i = 0; i < rdev->num_crtc; i++) {
3471 if (crtc_hung & (1 << i)) {
3472 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3473 if (tmp != crtc_status[i])
3474 crtc_hung &= ~(1 << i);
/*
 * evergreen_gpu_check_soft_reset - inspect the busy/pending status registers
 * and build a RADEON_RESET_* bitmask of the engines that appear stuck.
 * The final MC bit is deliberately cleared (busy is not hung for the MC).
 */
3485 static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
/* GRBM_STATUS: graphics pipeline and CP busy bits. */
3491 tmp = RREG32(GRBM_STATUS);
3492 if (tmp & (PA_BUSY | SC_BUSY |
3494 TA_BUSY | VGT_BUSY |
3496 SPI_BUSY | VGT_BUSY_NO_DMA))
3497 reset_mask |= RADEON_RESET_GFX;
3499 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3500 CP_BUSY | CP_COHERENCY_BUSY))
3501 reset_mask |= RADEON_RESET_CP;
/* An event-engine hang implicates GRBM, GFX and CP together. */
3503 if (tmp & GRBM_EE_BUSY)
3504 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3506 /* DMA_STATUS_REG */
3507 tmp = RREG32(DMA_STATUS_REG);
3508 if (!(tmp & DMA_IDLE))
3509 reset_mask |= RADEON_RESET_DMA;
/* SRBM_STATUS2 (bit test elided in this view). */
3512 tmp = RREG32(SRBM_STATUS2);
3514 reset_mask |= RADEON_RESET_DMA;
/* SRBM_STATUS: RLC / IH / semaphore / GRBM / VMC / MC request state. */
3517 tmp = RREG32(SRBM_STATUS);
3518 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3519 reset_mask |= RADEON_RESET_RLC;
3522 reset_mask |= RADEON_RESET_IH;
3525 reset_mask |= RADEON_RESET_SEM;
3527 if (tmp & GRBM_RQ_PENDING)
3528 reset_mask |= RADEON_RESET_GRBM;
3531 reset_mask |= RADEON_RESET_VMC;
3533 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3534 MCC_BUSY | MCD_BUSY))
3535 reset_mask |= RADEON_RESET_MC;
3537 if (evergreen_is_display_hung(rdev))
3538 reset_mask |= RADEON_RESET_DISPLAY;
/* VM L2 cache status (bit test elided in this view). */
3541 tmp = RREG32(VM_L2_STATUS);
3543 reset_mask |= RADEON_RESET_VMC;
3545 /* Skip MC reset as it's most likely not hung, just busy */
3546 if (reset_mask & RADEON_RESET_MC) {
3547 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3548 reset_mask &= ~RADEON_RESET_MC;
/*
 * evergreen_gpu_soft_reset - soft-reset the engines selected in reset_mask.
 *
 * Sequence: log status, halt the CP (and DMA ring if requested), quiesce the
 * memory controller, pulse the GRBM/SRBM soft-reset bits, then resume the MC
 * and log status again.  A zero reset_mask is a no-op.
 *
 * NOTE(review): udelay()s and some brace/return lines are elided from this
 * view of the file; the pulse sequence (set bits, delay, clear bits) is
 * split across the visible reads/writes.
 */
3554 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3556 struct evergreen_mc_save save;
3557 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3560 if (reset_mask == 0)
3563 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3565 evergreen_print_gpu_status_regs(rdev);
3567 /* Disable CP parsing/prefetching */
3568 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
/* Stop the async DMA ring before resetting the DMA engine. */
3570 if (reset_mask & RADEON_RESET_DMA) {
3572 tmp = RREG32(DMA_RB_CNTL);
3573 tmp &= ~DMA_RB_ENABLE;
3574 WREG32(DMA_RB_CNTL, tmp);
/* Quiesce the memory controller so the reset doesn't corrupt VRAM traffic. */
3579 evergreen_mc_stop(rdev, &save);
3580 if (evergreen_mc_wait_for_idle(rdev)) {
3581 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
/* Translate the RADEON_RESET_* mask into GRBM/SRBM soft-reset bits. */
3584 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3585 grbm_soft_reset |= SOFT_RESET_DB |
3598 if (reset_mask & RADEON_RESET_CP) {
3599 grbm_soft_reset |= SOFT_RESET_CP |
3602 srbm_soft_reset |= SOFT_RESET_GRBM;
3605 if (reset_mask & RADEON_RESET_DMA)
3606 srbm_soft_reset |= SOFT_RESET_DMA;
3608 if (reset_mask & RADEON_RESET_DISPLAY)
3609 srbm_soft_reset |= SOFT_RESET_DC;
3611 if (reset_mask & RADEON_RESET_RLC)
3612 srbm_soft_reset |= SOFT_RESET_RLC;
3614 if (reset_mask & RADEON_RESET_SEM)
3615 srbm_soft_reset |= SOFT_RESET_SEM;
3617 if (reset_mask & RADEON_RESET_IH)
3618 srbm_soft_reset |= SOFT_RESET_IH;
3620 if (reset_mask & RADEON_RESET_GRBM)
3621 srbm_soft_reset |= SOFT_RESET_GRBM;
3623 if (reset_mask & RADEON_RESET_VMC)
3624 srbm_soft_reset |= SOFT_RESET_VMC;
/* Never soft-reset the MC on IGPs (system RAM is behind it). */
3626 if (!(rdev->flags & RADEON_IS_IGP)) {
3627 if (reset_mask & RADEON_RESET_MC)
3628 srbm_soft_reset |= SOFT_RESET_MC;
/* Pulse GRBM soft-reset: assert, (delay elided), de-assert, read back. */
3631 if (grbm_soft_reset) {
3632 tmp = RREG32(GRBM_SOFT_RESET);
3633 tmp |= grbm_soft_reset;
3634 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3635 WREG32(GRBM_SOFT_RESET, tmp);
3636 tmp = RREG32(GRBM_SOFT_RESET);
3640 tmp &= ~grbm_soft_reset;
3641 WREG32(GRBM_SOFT_RESET, tmp);
3642 tmp = RREG32(GRBM_SOFT_RESET);
/* Pulse SRBM soft-reset the same way. */
3645 if (srbm_soft_reset) {
3646 tmp = RREG32(SRBM_SOFT_RESET);
3647 tmp |= srbm_soft_reset;
3648 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3649 WREG32(SRBM_SOFT_RESET, tmp);
3650 tmp = RREG32(SRBM_SOFT_RESET);
3654 tmp &= ~srbm_soft_reset;
3655 WREG32(SRBM_SOFT_RESET, tmp);
3656 tmp = RREG32(SRBM_SOFT_RESET);
3659 /* Wait a little for things to settle down */
3662 evergreen_mc_resume(rdev, &save);
3665 evergreen_print_gpu_status_regs(rdev);
/*
 * evergreen_asic_reset - full ASIC reset entry point.
 *
 * Computes which engines look hung, marks the engine hung in the BIOS
 * scratch registers, performs the soft reset, then re-checks and clears the
 * hung flag.  The early-exit and return paths are on lines elided from this
 * view — TODO confirm against full source.
 */
3668 int evergreen_asic_reset(struct radeon_device *rdev)
3672 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3675 r600_set_bios_scratch_engine_hung(rdev, true);
3677 evergreen_gpu_soft_reset(rdev, reset_mask);
/* Re-evaluate after the reset to decide whether it succeeded. */
3679 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3682 r600_set_bios_scratch_engine_hung(rdev, false);
3688 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3690 * @rdev: radeon_device pointer
3691 * @ring: radeon_ring structure holding ring information
3693 * Check if the GFX engine is locked up.
3694 * Returns true if the engine appears to be locked up, false if not.
3696 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3698 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* If no GFX/compute/CP engine looks hung, refresh the lockup tracker
 * and report healthy (the early return is on an elided line). */
3700 if (!(reset_mask & (RADEON_RESET_GFX |
3701 RADEON_RESET_COMPUTE |
3702 RADEON_RESET_CP))) {
3703 radeon_ring_lockup_update(ring);
3706 /* force CP activities */
3707 radeon_ring_force_activity(rdev, ring);
3708 return radeon_ring_test_lockup(rdev, ring);
3712 * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3714 * @rdev: radeon_device pointer
3715 * @ring: radeon_ring structure holding ring information
3717 * Check if the async DMA engine is locked up.
3718 * Returns true if the engine appears to be locked up, false if not.
3720 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3722 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* If the DMA engine doesn't look hung, refresh the lockup tracker and
 * report healthy (the early return is on an elided line). */
3724 if (!(reset_mask & RADEON_RESET_DMA)) {
3725 radeon_ring_lockup_update(ring);
3728 /* force ring activities */
3729 radeon_ring_force_activity(rdev, ring);
3730 return radeon_ring_test_lockup(rdev, ring);
/*
 * evergreen_get_vblank_counter - return the hardware frame counter for a
 * CRTC.  Out-of-range CRTC indices are rejected (the guarded return value
 * is on an elided line — presumably 0; verify against full source).
 */
3735 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3737 if (crtc >= rdev->num_crtc)
3740 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
/*
 * evergreen_disable_interrupt_state - force every interrupt source the
 * driver manages (CP rings, DMA, GRBM, per-CRTC vblank/pageflip, DAC
 * auto-detect, HPD) into the disabled state.  Only the HPD polarity bit is
 * preserved when clearing the HPD controls.
 */
3743 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
/* Cayman+: three CP rings and a second DMA engine to silence. */
3747 if (rdev->family >= CHIP_CAYMAN) {
3748 cayman_cp_int_cntl_setup(rdev, 0,
3749 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3750 cayman_cp_int_cntl_setup(rdev, 1, 0);
3751 cayman_cp_int_cntl_setup(rdev, 2, 0);
3752 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3753 WREG32(CAYMAN_DMA1_CNTL, tmp);
3755 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3756 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3757 WREG32(DMA_CNTL, tmp);
3758 WREG32(GRBM_INT_CNTL, 0);
/* Mask vblank/vline interrupts on every CRTC the chip actually has. */
3759 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
3760 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
3761 if (rdev->num_crtc >= 4) {
3762 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
3763 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
3765 if (rdev->num_crtc >= 6) {
3766 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
3767 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
/* Disable pageflip (GRPH) interrupts per CRTC. */
3770 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
3771 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
3772 if (rdev->num_crtc >= 4) {
3773 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
3774 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
3776 if (rdev->num_crtc >= 6) {
3777 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
3778 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
3781 /* only one DAC on DCE6 */
3782 if (!ASIC_IS_DCE6(rdev))
3783 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
3784 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
/* Clear HPD interrupt enables; keep only the polarity setting. */
3786 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3787 WREG32(DC_HPD1_INT_CONTROL, tmp);
3788 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3789 WREG32(DC_HPD2_INT_CONTROL, tmp);
3790 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3791 WREG32(DC_HPD3_INT_CONTROL, tmp);
3792 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3793 WREG32(DC_HPD4_INT_CONTROL, tmp);
3794 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3795 WREG32(DC_HPD5_INT_CONTROL, tmp);
3796 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3797 WREG32(DC_HPD6_INT_CONTROL, tmp);
/*
 * evergreen_irq_set - program the interrupt-enable state from rdev->irq.
 *
 * Builds shadow values for every interrupt source (CP rings, DMA engines,
 * per-CRTC vblank, HPD 1-6, AFMT/HDMI audio 1-6) based on the software
 * request flags, then writes them all to hardware in one pass.  If the IH
 * ring is disabled, everything is forced off instead.
 *
 * NOTE(review): closing braces, early returns and the grph pageflip-enable
 * lines are elided from this view of the file.
 */
3801 int evergreen_irq_set(struct radeon_device *rdev)
3803 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
3804 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
3805 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
3806 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
3807 u32 grbm_int_cntl = 0;
3808 u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
3809 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
3810 u32 dma_cntl, dma_cntl1 = 0;
3812 if (!rdev->irq.installed) {
3813 dev_warn(rdev->dev, "Can't enable IRQ/MSI because no handler is installed\n");
3816 /* don't enable anything if the ih is disabled */
3817 if (!rdev->ih.enabled) {
3818 r600_disable_interrupts(rdev);
3819 /* force the active interrupt state to all disabled */
3820 evergreen_disable_interrupt_state(rdev);
/* Start from current HPD state with the enable bit cleared. */
3824 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3825 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3826 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3827 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3828 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3829 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
/* Same for the HDMI audio (AFMT) write-trigger interrupts. */
3831 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3832 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3833 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3834 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3835 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3836 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3838 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
/* CP fence interrupts: Cayman+ has three rings, older parts one. */
3840 if (rdev->family >= CHIP_CAYMAN) {
3841 /* enable CP interrupts on all rings */
3842 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3843 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3844 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3846 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
3847 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
3848 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
3850 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
3851 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
3852 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
3855 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3856 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3857 cp_int_cntl |= RB_INT_ENABLE;
3858 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
/* Async DMA trap interrupts (second engine on Cayman+). */
3862 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3863 DRM_DEBUG("r600_irq_set: sw int dma\n");
3864 dma_cntl |= TRAP_ENABLE;
3867 if (rdev->family >= CHIP_CAYMAN) {
3868 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3869 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
3870 DRM_DEBUG("r600_irq_set: sw int dma1\n");
3871 dma_cntl1 |= TRAP_ENABLE;
/* Vblank: enabled when either a vblank waiter or a pageflip is pending. */
3875 if (rdev->irq.crtc_vblank_int[0] ||
3876 atomic_read(&rdev->irq.pflip[0])) {
3877 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
3878 crtc1 |= VBLANK_INT_MASK;
3880 if (rdev->irq.crtc_vblank_int[1] ||
3881 atomic_read(&rdev->irq.pflip[1])) {
3882 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
3883 crtc2 |= VBLANK_INT_MASK;
3885 if (rdev->irq.crtc_vblank_int[2] ||
3886 atomic_read(&rdev->irq.pflip[2])) {
3887 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
3888 crtc3 |= VBLANK_INT_MASK;
3890 if (rdev->irq.crtc_vblank_int[3] ||
3891 atomic_read(&rdev->irq.pflip[3])) {
3892 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
3893 crtc4 |= VBLANK_INT_MASK;
3895 if (rdev->irq.crtc_vblank_int[4] ||
3896 atomic_read(&rdev->irq.pflip[4])) {
3897 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
3898 crtc5 |= VBLANK_INT_MASK;
3900 if (rdev->irq.crtc_vblank_int[5] ||
3901 atomic_read(&rdev->irq.pflip[5])) {
3902 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
3903 crtc6 |= VBLANK_INT_MASK;
/* Hot-plug detect enables per connector. */
3905 if (rdev->irq.hpd[0]) {
3906 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
3907 hpd1 |= DC_HPDx_INT_EN;
3909 if (rdev->irq.hpd[1]) {
3910 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
3911 hpd2 |= DC_HPDx_INT_EN;
3913 if (rdev->irq.hpd[2]) {
3914 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
3915 hpd3 |= DC_HPDx_INT_EN;
3917 if (rdev->irq.hpd[3]) {
3918 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
3919 hpd4 |= DC_HPDx_INT_EN;
3921 if (rdev->irq.hpd[4]) {
3922 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
3923 hpd5 |= DC_HPDx_INT_EN;
3925 if (rdev->irq.hpd[5]) {
3926 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
3927 hpd6 |= DC_HPDx_INT_EN;
/* HDMI audio write-trigger enables. */
3929 if (rdev->irq.afmt[0]) {
3930 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
3931 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3933 if (rdev->irq.afmt[1]) {
3934 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
3935 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3937 if (rdev->irq.afmt[2]) {
3938 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
3939 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3941 if (rdev->irq.afmt[3]) {
3942 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
3943 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3945 if (rdev->irq.afmt[4]) {
3946 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
3947 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3949 if (rdev->irq.afmt[5]) {
3950 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
3951 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
/* Commit all shadow values to hardware. */
3954 if (rdev->family >= CHIP_CAYMAN) {
3955 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
3956 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
3957 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
3959 WREG32(CP_INT_CNTL, cp_int_cntl);
3961 WREG32(DMA_CNTL, dma_cntl);
3963 if (rdev->family >= CHIP_CAYMAN)
3964 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
3966 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
3968 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
3969 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
3970 if (rdev->num_crtc >= 4) {
3971 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
3972 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
3974 if (rdev->num_crtc >= 6) {
3975 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
3976 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
3979 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
3980 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
3981 if (rdev->num_crtc >= 4) {
3982 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
3983 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
3985 if (rdev->num_crtc >= 6) {
3986 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
3987 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
3990 WREG32(DC_HPD1_INT_CONTROL, hpd1);
3991 WREG32(DC_HPD2_INT_CONTROL, hpd2);
3992 WREG32(DC_HPD3_INT_CONTROL, hpd3);
3993 WREG32(DC_HPD4_INT_CONTROL, hpd4);
3994 WREG32(DC_HPD5_INT_CONTROL, hpd5);
3995 WREG32(DC_HPD6_INT_CONTROL, hpd6);
3997 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
3998 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
3999 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4000 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4001 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4002 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4007 static void evergreen_irq_ack(struct radeon_device *rdev)
4011 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4012 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4013 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4014 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4015 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4016 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4017 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4018 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4019 if (rdev->num_crtc >= 4) {
4020 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4021 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4023 if (rdev->num_crtc >= 6) {
4024 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4025 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4028 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4029 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4030 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4031 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4032 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4033 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4035 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4036 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4037 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4038 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4039 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4040 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4041 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4042 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4043 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4044 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4045 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4046 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4048 if (rdev->num_crtc >= 4) {
4049 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4050 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4051 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4052 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4053 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4054 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4055 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4056 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4057 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4058 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4059 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4060 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4063 if (rdev->num_crtc >= 6) {
4064 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4065 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4066 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4067 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4068 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4069 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4070 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4071 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4072 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4073 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4074 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4075 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4078 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4079 tmp = RREG32(DC_HPD1_INT_CONTROL);
4080 tmp |= DC_HPDx_INT_ACK;
4081 WREG32(DC_HPD1_INT_CONTROL, tmp);
4083 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4084 tmp = RREG32(DC_HPD2_INT_CONTROL);
4085 tmp |= DC_HPDx_INT_ACK;
4086 WREG32(DC_HPD2_INT_CONTROL, tmp);
4088 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4089 tmp = RREG32(DC_HPD3_INT_CONTROL);
4090 tmp |= DC_HPDx_INT_ACK;
4091 WREG32(DC_HPD3_INT_CONTROL, tmp);
4093 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4094 tmp = RREG32(DC_HPD4_INT_CONTROL);
4095 tmp |= DC_HPDx_INT_ACK;
4096 WREG32(DC_HPD4_INT_CONTROL, tmp);
4098 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4099 tmp = RREG32(DC_HPD5_INT_CONTROL);
4100 tmp |= DC_HPDx_INT_ACK;
4101 WREG32(DC_HPD5_INT_CONTROL, tmp);
4103 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4104 tmp = RREG32(DC_HPD5_INT_CONTROL);
4105 tmp |= DC_HPDx_INT_ACK;
4106 WREG32(DC_HPD6_INT_CONTROL, tmp);
4108 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4109 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4110 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4111 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4113 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4114 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4115 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4116 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4118 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4119 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4120 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4121 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4123 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4124 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4125 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4126 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4128 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4129 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4130 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4131 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4133 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4134 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4135 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4136 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
/*
 * Disable all interrupt generation, give any in-flight interrupt time to
 * land, then ack whatever is still pending so the IH state is left clean.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	/* Fix: the delay between disabling and acking was dropped from this
	 * copy of the file; restored (FreeBSD drm2 uses DRM_MDELAY). */
	DRM_MDELAY(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
/*
 * Quiesce interrupt delivery for suspend: mask and ack everything via
 * evergreen_irq_disable(), then halt the RLC so nothing new is raised
 * while the device is asleep.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4155 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4159 if (rdev->wb.enabled)
4160 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4162 wptr = RREG32(IH_RB_WPTR);
4164 if (wptr & RB_OVERFLOW) {
4165 /* When a ring buffer overflow happen start parsing interrupt
4166 * from the last not overwritten vector (wptr + 16). Hopefully
4167 * this should allow us to catchup.
4169 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4170 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4171 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4172 tmp = RREG32(IH_RB_CNTL);
4173 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4174 WREG32(IH_RB_CNTL, tmp);
4176 return (wptr & rdev->ih.ptr_mask);
/* Top-half interrupt processing for evergreen: drains the IH ring,
 * decoding (src_id, src_data) vectors for vblank/vline, hotplug (HPD),
 * HDMI/AFMT audio, CP, DMA and VM-fault sources, and defers hotplug and
 * audio handling to taskqueues (FreeBSD drm2).
 * NOTE(review): this copy of the file has lost structural lines -- the
 * switch (src_id) / switch (src_data) headers, several case labels
 * (e.g. the HDMI block and the UVD / VM-fault labels), break statements
 * and closing braces.  The statement lines below are preserved
 * byte-for-byte; recover the control flow from revision history before
 * compiling.
 */
4179 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
4183 u32 src_id, src_data;
4185 bool queue_hotplug = false;
4186 bool queue_hdmi = false;
/* bail out when the IH ring is disabled or the device is shutting down */
4188 if (!rdev->ih.enabled || rdev->shutdown)
4191 wptr = evergreen_get_ih_wptr(rdev);
4194 /* is somebody else already processing irqs? */
4195 if (atomic_xchg(&rdev->ih.lock, 1))
4198 rptr = rdev->ih.rptr;
4199 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4201 /* Order reading of wptr vs. reading of IH ring data */
4204 /* display interrupts */
4205 evergreen_irq_ack(rdev);
4207 while (rptr != wptr) {
4208 /* wptr/rptr are in bytes! */
4209 ring_index = rptr / 4;
4210 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4211 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
/* src_id 1..6: per-CRTC vblank/vline -- clear the latched status bit,
 * wake vblank waiters and kick any pending page flip for that crtc. */
4214 case 1: /* D1 vblank/vline */
4216 case 0: /* D1 vblank */
4217 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4218 if (rdev->irq.crtc_vblank_int[0]) {
4219 drm_handle_vblank(rdev->ddev, 0);
4220 rdev->pm.vblank_sync = true;
4221 DRM_WAKEUP(&rdev->irq.vblank_queue);
4223 if (atomic_read(&rdev->irq.pflip[0]))
4224 radeon_crtc_handle_flip(rdev, 0);
4225 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4226 DRM_DEBUG("IH: D1 vblank\n");
4229 case 1: /* D1 vline */
4230 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4231 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4232 DRM_DEBUG("IH: D1 vline\n");
4236 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4240 case 2: /* D2 vblank/vline */
4242 case 0: /* D2 vblank */
4243 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4244 if (rdev->irq.crtc_vblank_int[1]) {
4245 drm_handle_vblank(rdev->ddev, 1);
4246 rdev->pm.vblank_sync = true;
4247 DRM_WAKEUP(&rdev->irq.vblank_queue);
4249 if (atomic_read(&rdev->irq.pflip[1]))
4250 radeon_crtc_handle_flip(rdev, 1);
4251 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4252 DRM_DEBUG("IH: D2 vblank\n");
4255 case 1: /* D2 vline */
4256 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4257 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4258 DRM_DEBUG("IH: D2 vline\n");
4262 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4266 case 3: /* D3 vblank/vline */
4268 case 0: /* D3 vblank */
4269 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4270 if (rdev->irq.crtc_vblank_int[2]) {
4271 drm_handle_vblank(rdev->ddev, 2);
4272 rdev->pm.vblank_sync = true;
4273 DRM_WAKEUP(&rdev->irq.vblank_queue);
4275 if (atomic_read(&rdev->irq.pflip[2]))
4276 radeon_crtc_handle_flip(rdev, 2);
4277 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4278 DRM_DEBUG("IH: D3 vblank\n");
4281 case 1: /* D3 vline */
4282 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4283 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4284 DRM_DEBUG("IH: D3 vline\n");
4288 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4292 case 4: /* D4 vblank/vline */
4294 case 0: /* D4 vblank */
4295 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4296 if (rdev->irq.crtc_vblank_int[3]) {
4297 drm_handle_vblank(rdev->ddev, 3);
4298 rdev->pm.vblank_sync = true;
4299 DRM_WAKEUP(&rdev->irq.vblank_queue);
4301 if (atomic_read(&rdev->irq.pflip[3]))
4302 radeon_crtc_handle_flip(rdev, 3);
4303 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4304 DRM_DEBUG("IH: D4 vblank\n");
4307 case 1: /* D4 vline */
4308 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4309 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4310 DRM_DEBUG("IH: D4 vline\n");
4314 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4318 case 5: /* D5 vblank/vline */
4320 case 0: /* D5 vblank */
4321 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4322 if (rdev->irq.crtc_vblank_int[4]) {
4323 drm_handle_vblank(rdev->ddev, 4);
4324 rdev->pm.vblank_sync = true;
4325 DRM_WAKEUP(&rdev->irq.vblank_queue);
4327 if (atomic_read(&rdev->irq.pflip[4]))
4328 radeon_crtc_handle_flip(rdev, 4);
4329 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4330 DRM_DEBUG("IH: D5 vblank\n");
4333 case 1: /* D5 vline */
4334 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4335 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4336 DRM_DEBUG("IH: D5 vline\n");
4340 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4344 case 6: /* D6 vblank/vline */
4346 case 0: /* D6 vblank */
4347 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4348 if (rdev->irq.crtc_vblank_int[5]) {
4349 drm_handle_vblank(rdev->ddev, 5);
4350 rdev->pm.vblank_sync = true;
4351 DRM_WAKEUP(&rdev->irq.vblank_queue);
4353 if (atomic_read(&rdev->irq.pflip[5]))
4354 radeon_crtc_handle_flip(rdev, 5);
4355 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4356 DRM_DEBUG("IH: D6 vblank\n");
4359 case 1: /* D6 vline */
4360 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4361 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4362 DRM_DEBUG("IH: D6 vline\n");
4366 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* HPD hotplug: clear the latched flag and schedule the hotplug taskqueue */
4370 case 42: /* HPD hotplug */
4373 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4374 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4375 queue_hotplug = true;
4376 DRM_DEBUG("IH: HPD1\n");
4380 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4381 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4382 queue_hotplug = true;
4383 DRM_DEBUG("IH: HPD2\n");
4387 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4388 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4389 queue_hotplug = true;
4390 DRM_DEBUG("IH: HPD3\n");
4394 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4395 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4396 queue_hotplug = true;
4397 DRM_DEBUG("IH: HPD4\n");
4401 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4402 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4403 queue_hotplug = true;
4404 DRM_DEBUG("IH: HPD5\n");
4408 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4409 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4410 queue_hotplug = true;
4411 DRM_DEBUG("IH: HPD6\n");
4415 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* HDMI/AFMT audio-format-change events.
 * NOTE(review): the enclosing HDMI case label and the
 * "queue_hdmi = true;" lines appear dropped from this extraction. */
4422 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4423 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4425 DRM_DEBUG("IH: HDMI0\n");
4429 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4430 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4432 DRM_DEBUG("IH: HDMI1\n");
4436 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4437 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4439 DRM_DEBUG("IH: HDMI2\n");
4443 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4444 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4446 DRM_DEBUG("IH: HDMI3\n");
4450 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4451 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4453 DRM_DEBUG("IH: HDMI4\n");
4457 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4458 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4460 DRM_DEBUG("IH: HDMI5\n");
4464 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
/* NOTE(review): the UVD and VM-protection-fault case labels appear
 * dropped from this extraction before the two statements below. */
4468 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4469 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4473 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4474 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
4475 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4476 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4477 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4478 /* reset addr and status */
4479 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4481 case 176: /* CP_INT in ring buffer */
4482 case 177: /* CP_INT in IB1 */
4483 case 178: /* CP_INT in IB2 */
4484 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4485 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4487 case 181: /* CP EOP event */
4488 DRM_DEBUG("IH: CP EOP\n");
/* Cayman+ routes EOP per-ring; pre-Cayman only has the GFX ring */
4489 if (rdev->family >= CHIP_CAYMAN) {
4492 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4495 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4498 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4502 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4504 case 224: /* DMA trap event */
4505 DRM_DEBUG("IH: DMA trap\n");
4506 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4508 case 233: /* GUI IDLE */
4509 DRM_DEBUG("IH: GUI idle\n");
4511 case 244: /* DMA trap event */
4512 if (rdev->family >= CHIP_CAYMAN) {
4513 DRM_DEBUG("IH: DMA1 trap\n");
4514 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4518 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* advance to the next 16-byte IH vector */
4522 /* wptr/rptr are in bytes! */
4524 rptr &= rdev->ih.ptr_mask;
/* deferred work: hotplug and audio run outside interrupt context */
4527 taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
4529 taskqueue_enqueue(rdev->tq, &rdev->audio_work);
4530 rdev->ih.rptr = rptr;
4531 WREG32(IH_RB_RPTR, rdev->ih.rptr);
4532 atomic_set(&rdev->ih.lock, 0);
4534 /* make sure wptr hasn't changed while processing */
4535 wptr = evergreen_get_ih_wptr(rdev);
/**
 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
 *
 * @rdev: radeon_device pointer
 * @fence: radeon fence object
 *
 * Add a DMA fence packet to the ring to write
 * the fence seq number and DMA trap packet to generate
 * an interrupt if needed (evergreen-SI).
 */
4552 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4553 struct radeon_fence *fence)
4555 struct radeon_ring *ring = &rdev->ring[fence->ring];
4556 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4557 /* write the fence */
4558 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
4559 radeon_ring_write(ring, addr & 0xfffffffc);
4560 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4561 radeon_ring_write(ring, fence->seq);
4562 /* generate an interrupt */
4563 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
4565 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
4566 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
4567 radeon_ring_write(ring, 1);
/**
 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
 *
 * @rdev: radeon_device pointer
 * @ib: IB object to schedule
 *
 * Schedule an IB in the DMA ring (evergreen).
 */
4578 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
4579 struct radeon_ib *ib)
4581 struct radeon_ring *ring = &rdev->ring[ib->ring];
4583 if (rdev->wb.enabled) {
4584 u32 next_rptr = ring->wptr + 4;
4585 while ((next_rptr & 7) != 5)
4588 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
4589 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
4590 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
4591 radeon_ring_write(ring, next_rptr);
4594 /* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
4595 * Pad as necessary with NOPs.
4597 while ((ring->wptr & 7) != 5)
4598 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
4599 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
4600 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
4601 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
/**
 * evergreen_copy_dma - copy pages using the DMA engine
 *
 * @rdev: radeon_device pointer
 * @src_offset: src GPU address
 * @dst_offset: dst GPU address
 * @num_gpu_pages: number of GPU pages to xfer
 * @fence: radeon fence object
 *
 * Copy GPU paging using the DMA engine (evergreen-cayman).
 * Used by the radeon ttm implementation to move pages if
 * registered as the asic copy callback.
 */
4618 int evergreen_copy_dma(struct radeon_device *rdev,
4619 uint64_t src_offset, uint64_t dst_offset,
4620 unsigned num_gpu_pages,
4621 struct radeon_fence **fence)
4623 struct radeon_semaphore *sem = NULL;
4624 int ring_index = rdev->asic->copy.dma_ring_index;
4625 struct radeon_ring *ring = &rdev->ring[ring_index];
4626 u32 size_in_dw, cur_size_in_dw;
4630 r = radeon_semaphore_create(rdev, &sem);
4632 DRM_ERROR("radeon: moving bo (%d).\n", r);
4636 size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
4637 num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
4638 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
4640 DRM_ERROR("radeon: moving bo (%d).\n", r);
4641 radeon_semaphore_free(rdev, &sem, NULL);
4645 if (radeon_fence_need_sync(*fence, ring->idx)) {
4646 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
4648 radeon_fence_note_sync(*fence, ring->idx);
4650 radeon_semaphore_free(rdev, &sem, NULL);
4653 for (i = 0; i < num_loops; i++) {
4654 cur_size_in_dw = size_in_dw;
4655 if (cur_size_in_dw > 0xFFFFF)
4656 cur_size_in_dw = 0xFFFFF;
4657 size_in_dw -= cur_size_in_dw;
4658 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
4659 radeon_ring_write(ring, dst_offset & 0xfffffffc);
4660 radeon_ring_write(ring, src_offset & 0xfffffffc);
4661 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
4662 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
4663 src_offset += cur_size_in_dw * 4;
4664 dst_offset += cur_size_in_dw * 4;
4667 r = radeon_fence_emit(rdev, fence, ring->idx);
4669 radeon_ring_unlock_undo(rdev, ring);
4673 radeon_ring_unlock_commit(rdev, ring);
4674 radeon_semaphore_free(rdev, &sem, *fence);
/* Bring the evergreen asic fully up: microcode, MC programming, GART,
 * writeback, fence rings, IH/irqs, CP/DMA/UVD rings, IB pool and audio.
 * Called from evergreen_init() and evergreen_resume().
 * NOTE(review): this copy of the file has lost the "if (r) ... return r;"
 * error checks, blank lines and closing braces between the surviving
 * statements below; the statements are preserved byte-for-byte -- restore
 * the error handling from revision history before compiling.
 */
4679 static int evergreen_startup(struct radeon_device *rdev)
4681 struct radeon_ring *ring;
4684 /* enable pcie gen2 link */
4685 evergreen_pcie_gen2_enable(rdev);
/* DCE5 (NI) parts additionally need MC microcode; older parts use the
 * r600 loader path */
4687 if (ASIC_IS_DCE5(rdev)) {
4688 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
4689 r = ni_init_microcode(rdev);
4691 DRM_ERROR("Failed to load firmware!\n");
4695 r = ni_mc_load_microcode(rdev);
4697 DRM_ERROR("Failed to load MC firmware!\n");
4701 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
4702 r = r600_init_microcode(rdev);
4704 DRM_ERROR("Failed to load firmware!\n");
4710 r = r600_vram_scratch_init(rdev);
4714 evergreen_mc_program(rdev);
/* AGP vs PCIE GART setup are mutually exclusive */
4715 if (rdev->flags & RADEON_IS_AGP) {
4716 evergreen_agp_enable(rdev);
4718 r = evergreen_pcie_gart_enable(rdev);
4722 evergreen_gpu_init(rdev);
/* blitter is optional: on failure fall back to memcpy-based moves */
4724 r = evergreen_blit_init(rdev);
4726 r600_blit_fini(rdev);
4727 rdev->asic->copy.copy = NULL;
4728 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
4731 /* allocate wb buffer */
4732 r = radeon_wb_init(rdev);
4736 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
4738 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
4742 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
4744 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
/* UVD is best-effort: on failure its ring is simply disabled below */
4748 r = rv770_uvd_resume(rdev);
4750 r = radeon_fence_driver_start_ring(rdev,
4751 R600_RING_TYPE_UVD_INDEX);
4753 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
4757 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
/* Enable IRQ */
4760 if (!rdev->irq.installed) {
4761 r = radeon_irq_kms_init(rdev);
4766 r = r600_irq_init(rdev);
4768 DRM_ERROR("radeon: IH init failed (%d).\n", r);
4769 radeon_irq_kms_fini(rdev);
4772 evergreen_irq_set(rdev);
4774 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
4775 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
4776 R600_CP_RB_RPTR, R600_CP_RB_WPTR,
4777 0, 0xfffff, RADEON_CP_PACKET2);
4781 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
4782 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
4783 DMA_RB_RPTR, DMA_RB_WPTR,
4784 2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
4788 r = evergreen_cp_load_microcode(rdev);
4791 r = evergreen_cp_resume(rdev);
4794 r = r600_dma_resume(rdev);
/* UVD ring only exists when rv770_uvd_resume() above succeeded */
4798 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
4799 if (ring->ring_size) {
4800 r = radeon_ring_init(rdev, ring, ring->ring_size,
4801 R600_WB_UVD_RPTR_OFFSET,
4802 UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
4803 0, 0xfffff, RADEON_CP_PACKET2);
4805 r = r600_uvd_init(rdev);
4808 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
4811 r = radeon_ib_pool_init(rdev);
4813 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
4817 r = r600_audio_init(rdev);
4819 DRM_ERROR("radeon: audio init failed\n");
4826 int evergreen_resume(struct radeon_device *rdev)
4830 /* reset the asic, the gfx blocks are often in a bad state
4831 * after the driver is unloaded or after a resume
4833 if (radeon_asic_reset(rdev))
4834 dev_warn(rdev->dev, "GPU reset failed !\n");
4835 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
4836 * posting will perform necessary task to bring back GPU into good
4840 atom_asic_init(rdev->mode_info.atom_context);
4842 /* init golden registers */
4843 evergreen_init_golden_registers(rdev);
4845 rdev->accel_working = true;
4846 r = evergreen_startup(rdev);
4848 DRM_ERROR("evergreen startup failed on resume\n");
4849 rdev->accel_working = false;
/*
 * Suspend path: stop the audio and UVD users first, halt the CP and DMA
 * engines, quiesce interrupts, disable writeback and tear down the GART
 * mapping.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	radeon_uvd_suspend(rdev);
	/* Fix: the CP stop was dropped from this copy of the file; restored
	 * so the graphics ring is halted before interrupts go away. */
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	r600_uvd_rbc_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/* One-time asic init: read the (ATOM) BIOS, reset and post the card,
 * program golden registers, set up scratch/surface registers, clocks,
 * fences, AGP, the memory controller, BO manager, the GFX/DMA/UVD rings,
 * the IH ring and GART, then attempt a first startup.
 * NOTE(review): this copy of the file has lost the "if (r) return r;"
 * checks, several return statements, blank lines and closing braces
 * between the surviving statements below; the statements are preserved
 * byte-for-byte -- restore the error handling from revision history
 * before compiling.
 */
4877 int evergreen_init(struct radeon_device *rdev)
/* Read BIOS */
4882 if (!radeon_get_bios(rdev)) {
4883 if (ASIC_IS_AVIVO(rdev))
4886 /* Must be an ATOMBIOS */
4887 if (!rdev->is_atom_bios) {
4888 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
4891 r = radeon_atombios_init(rdev);
4894 /* reset the asic, the gfx blocks are often in a bad state
4895 * after the driver is unloaded or after a resume
4897 if (radeon_asic_reset(rdev))
4898 dev_warn(rdev->dev, "GPU reset failed !\n");
4899 /* Post card if necessary */
4900 if (!radeon_card_posted(rdev)) {
4902 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
4905 DRM_INFO("GPU not posted. posting now...\n");
4906 atom_asic_init(rdev->mode_info.atom_context);
4908 /* init golden registers */
4909 evergreen_init_golden_registers(rdev);
4910 /* Initialize scratch registers */
4911 r600_scratch_init(rdev);
4912 /* Initialize surface registers */
4913 radeon_surface_init(rdev);
4914 /* Initialize clocks */
4915 radeon_get_clock_info(rdev->ddev);
/* Fence driver */
4917 r = radeon_fence_driver_init(rdev);
4920 /* initialize AGP */
4921 if (rdev->flags & RADEON_IS_AGP) {
4922 r = radeon_agp_init(rdev);
4924 radeon_agp_disable(rdev);
4926 /* initialize memory controller */
4927 r = evergreen_mc_init(rdev);
4930 /* Memory manager */
4931 r = radeon_bo_init(rdev);
/* ring buffers: 1 MiB GFX ring, 64 KiB DMA ring */
4935 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
4936 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
4938 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
4939 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
4941 r = radeon_uvd_init(rdev);
4943 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
/* NOTE(review): the UVD ring-size argument line appears dropped here */
4944 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
4948 rdev->ih.ring_obj = NULL;
4949 r600_ih_ring_init(rdev, 64 * 1024);
4951 r = r600_pcie_gart_init(rdev);
/* first startup attempt; on failure tear down acceleration and continue
 * as an unaccelerated device */
4955 rdev->accel_working = true;
4956 r = evergreen_startup(rdev);
4958 dev_err(rdev->dev, "disabling GPU acceleration\n");
4960 r600_dma_fini(rdev);
4961 r600_irq_fini(rdev);
4962 radeon_wb_fini(rdev);
4963 radeon_ib_pool_fini(rdev);
4964 radeon_irq_kms_fini(rdev);
4965 evergreen_pcie_gart_fini(rdev);
4966 rdev->accel_working = false;
4969 /* Don't start up if the MC ucode is missing on BTC parts.
4970 * The default clocks and voltages before the MC ucode
4971 * is loaded are not suffient for advanced operations.
4973 if (ASIC_IS_DCE5(rdev)) {
4974 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
4975 DRM_ERROR("radeon: MC ucode required for NI+.\n");
4983 void evergreen_fini(struct radeon_device *rdev)
4985 r600_audio_fini(rdev);
4986 r600_blit_fini(rdev);
4988 r600_dma_fini(rdev);
4989 r600_irq_fini(rdev);
4990 radeon_wb_fini(rdev);
4991 radeon_ib_pool_fini(rdev);
4992 radeon_irq_kms_fini(rdev);
4993 evergreen_pcie_gart_fini(rdev);
4994 radeon_uvd_fini(rdev);
4995 r600_vram_scratch_fini(rdev);
4996 radeon_gem_fini(rdev);
4997 radeon_fence_driver_fini(rdev);
4998 radeon_agp_fini(rdev);
4999 radeon_bo_fini(rdev);
5000 radeon_atombios_fini(rdev);
5001 if (ASIC_IS_DCE5(rdev))
5002 ni_fini_microcode(rdev);
5004 r600_fini_microcode(rdev);
5005 drm_free(rdev->bios, M_DRM);
5009 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5011 u32 link_width_cntl, speed_cntl, mask;
5014 if (radeon_pcie_gen2 == 0)
5017 if (rdev->flags & RADEON_IS_IGP)
5020 if (!(rdev->flags & RADEON_IS_PCIE))
5023 /* x2 cards have a special sequence */
5024 if (ASIC_IS_X2(rdev))
5027 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
5031 if (!(mask & DRM_PCIE_SPEED_50))
5034 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5035 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5036 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5040 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5042 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5043 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5045 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5046 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5047 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5049 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5050 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5051 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5053 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5054 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5055 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5057 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5058 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5059 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5061 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5062 speed_cntl |= LC_GEN2_EN_STRAP;
5063 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5066 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5067 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5069 link_width_cntl |= LC_UPCONFIGURE_DIS;
5071 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5072 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);